# AngPoly3D
AngPoly3D is a Python package that calculates the angle between a reference orientation and a polyhedron's orientation, taking the polyhedron's point-group symmetry into account.
The reported angle is the minimum over all symmetry-equivalent orientations of the polyhedron, following the formula prescribed in the [paper](https://pubs.rsc.org/en/content/articlehtml/2019/sm/c8sm02643b) by Sharon C. Glotzer et al.
Orientations must be provided in [quaternion](https://en.wikipedia.org/wiki/Quaternion) format.
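As a rough illustration of the idea (a minimal sketch, not the package's internal code; it assumes unit quaternions in rowan's scalar-first layout):
```python
import numpy as np
import rowan

def min_symmetry_angle(q, q_ref, equiv_orientations):
    # Smallest rotation angle between q and q_ref over all
    # symmetry-equivalent orientations of the polyhedron.
    angles = []
    for s in equiv_orientations:
        q_equiv = rowan.multiply(q, s)  # apply one symmetry operation
        q_rel = rowan.multiply(rowan.inverse(q_ref), q_equiv)
        w = min(1.0, abs(q_rel[0]))     # scalar part, clamped for arccos
        angles.append(2.0 * np.arccos(w))
    return min(angles)
```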
## Contributor
- [Sumitava Kundu](https://github.com/sumitavakundu007/), [IACS, Kolkata](http://www.iacs.res.in/).
## Installation
### Prerequisites
1. [python3 or higher](https://www.python.org/download/releases/3.0/)
2. [python3-numpy](https://numpy.org/)
3. [rowan](https://rowan.readthedocs.io/en/latest/)
#### Using PyPI
Use the package manager [pip](https://pip.pypa.io/en/stable/) to install AngPoly3D.
```bash
pip install AngPoly3D
```
#### Using source code
```bash
git clone https://github.com/sumitavakundu007/AngPoly3D.git
cd AngPoly3D
python3 setup.py install --user
```
## Usage
```python
from AngPoly3D import ang_poly3d_func
angles = ang_poly3d_func()
print(angles)
```
#### The function will prompt for a few inputs. You must provide the orientations, equivalent orientations, and reference orientation in JSON format, as follows.
## sample_orientations.json
```json
{
"orientations": [[0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]
}
```
## sample_equivalent_orientations.json
```json
{
"equiv_orientations": [[1, 0, 0, 0]]
}
```
## sample_reference_orientation.json
```json
{
"ref_orientation": [1, 0, 0, 0]
}
```
## Contributing
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
Please make sure to update tests as appropriate.
## License
[MIT](https://choosealicense.com/licenses/mit/)
|
AngPoly3D
|
/AngPoly3D-0.0.1.tar.gz/AngPoly3D-0.0.1/README.md
|
README.md
|
from pathlib import Path
import os
import subprocess
import pysam
################################################################################
def looper(input_dir, qualifier = "N.A"):
    # Loop through files in a directory, yielding names that match the
    # qualifier (or every name if no qualifier is given).
    for file in os.listdir(input_dir):
        if qualifier == "N.A" or qualifier in file:
            yield file
################################################################################
def back_mapper(input_dir, output_dir, reference, threads, delim, mapq, flag, coverage, min_alignment_length, softclip_perc, logger):
with open(reference) as ref_fasta:
seq_count = 0
for line in ref_fasta:
if(">" in line):
seq_count += 1
if(seq_count > 1):
logger.warning('''Multiple sequences detected in the reference fasta file. Please ensure that each reference file represents ONE DISTINCT genome.
If multiple genomes are used in a single reference file, then reads that align equally well will ONLY be assigned to a single reference.
This can and will skew the mapping results.
Consider using the --flag 0 option to correct this.''')
    input_files = looper(input_dir, qualifier = "_R1")
    # Index the reference once, up front, rather than once per sample.
    subprocess.run(["bwa-mem2", "index", reference])
    for file in input_files:
        sample_ID = file.split(delim)[0]
        Path(output_dir, sample_ID).mkdir(exist_ok = True, parents = True)
        fileR2 = file.replace("_R1", "_R2")
if os.path.isfile(f"{input_dir}/{file}"):
r1_in = f"{input_dir}/{file}"
r2_in = f"{input_dir}/{fileR2}"
sample_out = f"{output_dir}/{sample_ID}/"
# Map raw reads
# subprocess.call(f"bwa-mem2 mem -t {threads} {reference} {r1_in} {r2_in} > {sample_out}{sample_ID}.sam", shell = True)
# subprocess.call(f"samtools view -q {mapq} -F {flag} -bS {sample_out}/{sample_ID}.sam > {sample_out}{sample_ID}.bam", shell = True)
# subprocess.call(f"samtools sort {sample_out}/{sample_ID}.bam > {sample_out}{sample_ID}_sort.bam", shell = True)
            #This step still uses shell=True for the pipe; unsure how to chain these without a shell for now.
subprocess.call(f"bwa-mem2 mem -t {threads} {reference} {r1_in} {r2_in} | samtools view -q {mapq} -F {flag} -bS - | samtools sort - > {sample_out}{sample_ID}_sort.bam", shell = True)
subprocess.run(["samtools", "index", f"{sample_out}{sample_ID}_sort.bam"])
# Pysam filtering
samfile = pysam.AlignmentFile(f"{sample_out}{sample_ID}_sort.bam", "rb")
samfile_out = pysam.AlignmentFile(f"{sample_out}{sample_ID}_sort_pysam.bam", "wb", template = samfile)
sam_iterator = samfile.fetch()
for alignment in sam_iterator:
if int(alignment.query_alignment_length) >= int(min_alignment_length):
if (float(alignment.query_length) * float(softclip_perc)) >= (float(alignment.query_length) - float(alignment.query_alignment_length)):
samfile_out.write(alignment)
subprocess.call(f"samtools index {sample_out}{sample_ID}_sort_pysam.bam", shell = True)
subprocess.call(f"samtools idxstats {sample_out}{sample_ID}_sort_pysam.bam > {sample_out}{sample_ID}_stats.txt", shell = True)
# Calculate Coverage
if coverage == "Y":
subprocess.run(["average-coverage.py", f"{sample_out}{sample_ID}_sort_pysam.bam", "-o", f"{sample_out}{sample_ID}_coverage.tsv"])
logger.info(f"average-coverage.py {sample_out}{sample_ID}_sort_pysam.bam -o {sample_out}{sample_ID}_coverage.tsv")
logger.info("Back mapping completed.")
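# Example invocation (hypothetical paths; any logging.Logger works):
#   import logging
#   logging.basicConfig(level = logging.INFO)
#   back_mapper(input_dir = "trimmed/", output_dir = "backmap/",
#               reference = "ref/genome.fasta", threads = 8, delim = "_",
#               mapq = "0", flag = 2304, coverage = "Y",
#               min_alignment_length = 50, softclip_perc = 1.0,
#               logger = logging.getLogger("backmap"))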
|
Angua-Luggage
|
/Angua_Luggage-0.0.9.tar.gz/Angua_Luggage-0.0.9/Angua_Luggage/AnguaUtils.py
|
AnguaUtils.py
|
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
from Bio.Blast import NCBIXML
from Bio import Entrez
import os
import subprocess
import csv
from shutil import move as shmove
from .utils import new_logger, count_calls, Cleanup
import yaml
from rpy2 import robjects as r
from collections import defaultdict
from .AnguaUtils import back_mapper
import importlib.resources
from . import data
#Angua_test needs changing to Python 3.9+ to use resources.files.
fasta_yaml = importlib.resources.open_binary(data, "fastaTool.yaml")
fastaTool_dict = yaml.full_load(fasta_yaml)
class seqHandler:
#Sets up a logger object.
_logger = new_logger(__name__)
#The seqHandler will store the file and folder names.
def __init__(self, fasta = "", xml = "", folder = "", folder_type = "misc"):
self.blast_file = False
self._folders = {}
self._pfam_tools = []
if fasta != "":
self.addFasta(fasta)
if xml != "":
self.blast_file = xml
            self.blast_file_name = self.blast_file.split("/")[-1][:-4]
if folder != "":
self.addFolder(folder_type, folder)
#Allows adding a .fasta file to the seqHandler after init.
#Will also use SeqIO to parse the fasta into an object.
def addFasta(self, filename: str):
self.fasta_file = os.path.abspath(filename)
self.fasta_file_name = self.fasta_file.split("/")[-1][:-6]
self._seq_object = SeqIO.parse(self.fasta_file, "fasta")
self._seq_dict = SeqIO.to_dict(SeqIO.parse(self.fasta_file, "fasta"))
def addRma(self, filename: str, sortby = "contig"):
self._rma_tool = rmaTool(filename, sortby)
self._logger.info("Added: " + str(self._rma_tool))
def addFolder(self, folder_type: str, folder: str):
self._folders[folder_type] = folder
#Generator for a given folder - looks for the file end in the 'pointer' - the name of the folder, so the dict key.
#Defaults to fetching fasta files from the dict key labelled as contigs, which is the usual.
def getFiles(self, folder_type = "misc", file_end = ".fasta"):
for file in os.listdir(self._folders[folder_type]):
if file.endswith(file_end):
yield f"{self._folders[folder_type]}/{file}"
#Runs SRAToolkit on a file outputted by the SRA Run Selector. i.e. one accession per line txt file.
def fetchSRA(self, output_folder: str, SRA_file: str):
self.addFolder(folder = output_folder, folder_type = "raw")
with open(SRA_file, "r") as accessions:
for accession in accessions.readlines():
#Trying to move away from FastaKit logging to getting Angua to do it but unsure how to do that here.
self._logger.info(f"Fetching {accession}")
#.strip is added due to trailing newlines.
#https://blog.dalibo.com/2022/09/12/monitoring-python-subprocesses.html
cmd = ["fasterq-dump", "-p", "-S", "-O", output_folder, accession.strip()]
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) as proc:
errs = []
for line in proc.stderr:
self._logger.info(line.strip())
errs.append(line)
stdout, _ = proc.communicate()
#result = subprocess.CompletedProcess(cmd, proc.returncode, stdout, "\n".join(errs))
for file in self.getFiles(folder_type = "raw", file_end = ".fastq"):
subprocess.run(["pigz", "-9", file])
#In future I would like to find a way for this to check the filesize of the accessions against the memory available.
def renameSRA(self):
sample_num = 1
for file in self.getFiles(folder_type = "raw", file_end = "_1.fastq.gz"):
#I did this in two steps for readability, plus maybe this could be made into a function or whatever along the line.
#It splits off the filename with extension, then just grabs the filename sans extension, then finally snips off the underscore to just get the SRR--.
filename = file.split("/")[-1]
filename_no_extension = filename.split(".")[0]
samplename = filename_no_extension.split("_")[0]
#Uses the fact the SRR reads are stored under the 'raw' folder type in seqHandler. Extension is based on what Angua anticipates.
new_filename = f"{self._folders['raw']}/{samplename}_S{sample_num}_L001_R1_001.fastq.gz"
os.rename(file, new_filename)
#Same for second read. This just ensures the reads get the same sample name.
new_filename_R2 = new_filename.replace("R1", "R2")
file_R2 = file.replace("_1", "_2")
os.rename(file_R2, new_filename_R2)
#Last of all increment the sample number.
sample_num += 1
#Returns the sample number for logging purposes if so desired.
return sample_num
#Creates a fasta file from a list of NCBI accessions.
#Needs an email, api is optional but allows faster retrieval.
def fetchEntrezFastas(self, id_list: list, email: str, output: str, api = False, proxy = 3128):
#To help with FERA proxy shenanigans.
os.environ["https_proxy"] = f"http://webcache:{proxy}"
self.fasta_file = f"{output}/viruses.fasta"
Entrez.email = email
        if api:
            Entrez.api_key = api
handle = Entrez.efetch(db = "nucleotide",
id = id_list,
rettype = "fasta")
sequences = SeqIO.parse(handle, "fasta")
with open(self.fasta_file, "wt") as fasta_file:
#SeqIO returns the count when it works, which is handy.
count = SeqIO.write(sequences, fasta_file, "fasta")
self._logger.info(f"{count} sequences found and written.")
#Checks if there's a Blast record attached to the seqHandler.
#If not, it will run a blastx search with the fastas it has attached.
#NOTE: Amend to also get blastp/n etc if needed. Maybe with a dict of functions.
def get_BlastRecord(self, search_term: str, minlength: int, get_all: bool):
        if not self.blast_file:
            self.blast_file = self.fasta_file[:-6] + "_blastx.xml"
            self._logger.info("Performing blastx, please wait.")
            subprocess.run(["blastx", "-query", self.fasta_file,
                            "-db", "/nfs/bigbio_00/smcgreig/Blast_databases/nr/nr_db_27012022/nr",
                            "-num_alignments", "5",
                            "-num_threads", "12", "-outfmt", "5", "-out", self.blast_file])
# self._logger.info(f"Blastx generated with {blast_run}")
#Attaches a blastTool to the seqHandler. It keeps track of the filename and remember the .fasta.
self._blastTool = blastTool(self.blast_file)
self._blastTool.getAlignments(search_term, minlength, get_all)
#Generates a fasta tool for each hit in the blast record, using the inputted fasta file.
#Each fastaTool represents one contig.
def unpackBlastRecord(self):
        if not getattr(self, "_blastTool", None):
self._logger.error("No blast record to unpack.")
return
unpacked_records = self._blastTool.unpackBlastRecord()
self._fasta_tools = [fastaTool(ID = record[0], frame = record[1], code = self._seq_dict[record[0]].seq) for record in unpacked_records]
#Makes a blastDB from either a manually inputted fasta file or the one attached to the seqHandler.
def makeBlastDb(self, outputdir: str, dbname = "db", fasta_path = ""):
if fasta_path == "":
fasta_path = self.fasta_file
subprocess.run(["makeblastdb",
"-in", fasta_path,
"-dbtype", "nucl",
"-input_type", "fasta",
"-out", f"{outputdir}/{dbname}"])
#Just an override so the seqHandler prints itself when implemented.
#Tools are not intended to be accessed without seqHandlers.
@count_calls
def outputCSV(self, outputdir: str, filename: str, add_text: str):
self._blastTool.outputCSV(outputdir, filename, add_text)
#Writes the fastaTools to a new fasta file. For example after TextSearch.
@count_calls
def outputFasta(self, outputdir: str, add_text: str):
# This list comprehension makes a new SeqRecord (i.e. single fasta entry)
# for each fastaTool attached to the seqHandler.
all_fastas = [SeqRecord(seq = tool.code,
id = tool.ID.split(" ")[0],
description = "_".join(tool.ID.split(" ")[1:])) for tool in self._fasta_tools]
output_file = f"{outputdir}/{self.fasta_file_name}_{add_text}.fasta"
with open(output_file, "w+") as output_handle:
SeqIO.write(all_fastas, output_handle, "fasta")
self._logger.info(output_file + " written.")
def outputFullLog(self, funcs: list):
func_dict = {"Fastas" : seqHandler.outputFasta,
"Alignments" : blastTool.getData,
"CSVs" : seqHandler.outputCSV}
to_log = [f"{func} processed: {func_dict[func].num_calls}" for func in funcs]
log_string = "\n".join(to_log)
self._logger.info(log_string)
def rmaToFasta(self, outputdir: str):
unpacked, sortedby = self._rma_tool.unpackInfo()
if sortedby == "contig":
self._fasta_tools = [fastaTool(ID = f"{contig}_{unpacked[contig][1]}", code = self._seq_dict[contig].seq) for contig in unpacked]
self.outputFasta(outputdir = outputdir, add_text = "_rma_by_contig")
elif sortedby == "virus":
for virus in unpacked:
virus_str = ("_").join(virus.split())
self._fasta_tools = [fastaTool(ID = f"{contig}_{virus}", code = self._seq_dict[contig].seq) for contig in unpacked[virus]]
self.outputFasta(outputdir = outputdir, add_text = f"{virus_str}_contigs")
else:
self._logger.error("No rma detected.")
return
@staticmethod
def splitBbduk(trimmed_dir) -> list:
for file in os.scandir(trimmed_dir):
if file.name.endswith("_R1.fastq.gz"):
filename = "".join(file.name.split(".")[-3])
R2_file = f"{filename.replace('_R1', '_R2')}.fastq.gz"
os.mkdir(f"{trimmed_dir}/{filename[:-3]}/")
shmove(f"{trimmed_dir}/{file.name}", f"{trimmed_dir}/{filename[:-3]}/{file.name}")
shmove(f"{trimmed_dir}/{R2_file}", f"{trimmed_dir}/{filename[:-3]}/{R2_file}")
@staticmethod
#Later seqHandler might be able to integrate the Angua outputs but for now, old fashioned way.
def backMapToBedGraph(trimmed_dir: str, out_dir: str, ref_file: str):
just_filename = "".join(ref_file.split("/")[-1])
ref_sample_name = "_".join(just_filename.split("_")[:2])
seqHandler.splitBbduk(trimmed_dir)
for d in os.listdir(trimmed_dir):
if d == ref_sample_name:
                back_mapper(input_dir = f"{trimmed_dir}/{d}/", output_dir = out_dir, reference = ref_file, threads = 10, delim = "_", mapq = "0", flag = 2304, coverage = "Y", min_alignment_length = 50, softclip_perc = 1.0, logger = seqHandler._logger)
if os.path.exists(out_dir):
            #There must be a better way to handle these paths.
for d in os.listdir(out_dir):
for seq in os.scandir(f"{out_dir}/{d}"):
if seq.name.endswith("_sort.bam"):
seqname = seq.name[:-4]
#https://blog.liang2.tw/posts/2016/01/plot-seq-depth-gviz/#convert-sequencing-depth-to-bedgraph-format
os.system(f"bedtools genomecov -bg -ibam {out_dir}/{d}/{seq.name} | gzip > {out_dir}/{d}/{seqname}.bedGraph.gz")
def runPfam(self, db_dir: str, outdirname: str, trimmed_dir: str, add_text = "_Pfam") -> str:
ORFs, fastas, grl = self.getORFs()
self.addFolder("pfam", f"{self._folders['contigs']}/{outdirname}")
outdir = self._folders["pfam"]
try:
os.mkdir(outdir)
except FileExistsError:
pass
for subfolder in os.scandir(self._folders["ORFs"]):
for file in os.listdir(subfolder):
if file.endswith("aa_.fasta"):
fasta_filename = file.split("/")[-1][:-14]
outfile = f"{outdir}/{fasta_filename}{add_text}.json"
self._pfam_tools.append(pfamTool(fasta_file = f"{self._folders['ORFs']}/{subfolder.name}/{file}",
db_dir = db_dir, outfile = outfile))
pfam_output = r.r['loop_json_files'](self._folders["pfam"], ORFs)
pfam_grl = pfam_output.rx2("pfam")
pfam_df = pfam_output.rx2("df")
for fasta in fastas:
fasta_name = os.path.basename(fasta)[:-6]
#sample_name = "".join(fasta_name.split("_")[6:])
seqHandler.backMapToBedGraph(trimmed_dir, f"{self._folders['contigs']}/{outdirname}/backmap/{fasta_name}", f"{self._folders['contigs']}/{fasta_name}.fasta")
Cleanup([self._folders["contigs"]],[".64", ".pac", ".fai", ".ann", ".amb", ".0123"])
r.r['generate_orf_plots'](grl, self._folders["contigs"], fastas, f"{self._folders['contigs']}/ORF_plots", pfam_grl, pfam_df, f"{self._folders['contigs']}/{outdirname}/backmap/")
def getORFs(self):
        #Source the bundled R script via a real filesystem path.
        with importlib.resources.path(data, 'orfs.r') as r_script:
            r.r.source(str(r_script))
ORF_output = r.r['loop_fasta_files'](self._folders["contigs"], "Virus_ORFs", 150)
self.addFolder("ORFs", f"{self._folders['contigs']}/Virus_ORFs")
log = ORF_output.rx2("log")
ORFs = ORF_output.rx2("ORFs")
fastas = ORF_output.rx2("files")
grl = ORF_output.rx2("grl")
for i, _ in enumerate(log):
self._logger.info(log.rx2(i+1))
return ORFs, fastas, grl
class pfamTool(seqHandler):
def __init__(self, fasta_file, db_dir, outfile):
with open(outfile, "w") as output:
subprocess.run(["pfam_scan.pl", "-fasta", fasta_file, "-dir", db_dir, "-json", "pretty"], stdout = output)
self._json = outfile
class fastaTool(seqHandler):
__slots__ = ['code', 'frame', 'codedict', 'ID', "transl_protein"]
#I have to do this to make sure it's ordered.
dict_tuple = (fastaTool_dict["degeneratedna"],
fastaTool_dict["aminoacids"],
fastaTool_dict["degenerateacids"],
fastaTool_dict["complimentarityDNA"],
fastaTool_dict["complimentarityRNA"],
fastaTool_dict["standard_codons"])
degeneratedna, aminoacids, degenerateacids, complimentarityDNA, complimentarityRNA, standard_codons = dict_tuple
#Simple function to get the degeneracy of a given oligo.
def countDegeneracy(oligo):
multiply_list = [len(fastaTool.degeneratedna[letter]) for letter in oligo if letter in fastaTool.degeneratedna]
result = 1
for number in multiply_list:
result = result * number
return result
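    # Example: with the usual IUPAC table (R = A/G, Y = C/T), the oligo "ARY"
    # gives countDegeneracy("ARY") == 2 * 2 == 4.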
#Generates a set of all possible oligos for a degenerate primer, for use with Blast and so on.
def findDegeneratePrimers(oligo):
#Converts the primer into a graph of possible paths through the options.
graph, end_node_num = fastaTool.degenOligoToGraph(oligo)
#Previous function appends a number to the strings. It's a list because sometimes the first nucleotide is degenerate.
start_nodes = [node for node in graph if node.endswith("0")]
#If the last letter is a 'normal' nucleotide, this just makes sure it adds the last nucleotides on, otherwise it will stop at the last degenerate nucleotide.
if oligo[-1] in ["A", "G", "T", "C"]:
end_node_num += 1
#Finds the end nodes.
end_nodes = [oligo for edge in graph.values() for oligo in edge if oligo.endswith(str(end_node_num))]
all_paths = []
all_oligo_lists = []
#The below goes from start to end for all possible start-end combos.
for start in start_nodes:
for end in end_nodes:
all_paths.append(fastaTool.find_all_paths(graph, start, end))
for start_end_combo in all_paths:
#Nested because a start-end combination will have multiple possible paths through it. In theory this can just be one start, but there will usually be multiple ends.
for path in start_end_combo:
full_oligo = []
for seq in path:
#Remove the number used to keep the graph in order.
#https://www.studytonight.com/python-howtos/remove-numbers-from-string-in-python
new_seq = ''.join(filter(lambda x: not x.isdigit(), seq))
#Build out the oligo.
full_oligo.append(new_seq)
#Adds the full oligo (which is presently a list of nucleotides) to a running list of all possible options.
all_oligo_lists.append(full_oligo)
#Set to prevent duplicates. I might be able to refactor so it doesn't produce duplicates in future.
all_oligos = set()
for oligo_list in all_oligo_lists:
#Convert the list of nucleotides to a string and create a list of possible strings.
oligo = "".join(oligo_list)
all_oligos.add(oligo)
return all_oligos
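    # Example: with the usual IUPAC table (R = A/G), findDegeneratePrimers("RA")
    # should yield {"AA", "GA"}.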
#Generates a graph representing a path through the potential options in a degenerate oligo.
def degenOligoToGraph(oligo):
#Turns it into a list in case a string was inputted.
oligo_list = [letter for letter in oligo]
#Instantiate lists and incremented ints.
current_string = []
present_nodes = []
#-1 so it increments to 0, thus matching the indexing on the lists it interacts with.
current_node_num = -1
graph = {}
for i, letter in enumerate(oligo_list):
#Checks if it's just a regular base.
if letter in ["A", "G", "C", "T"]:
#Builds a list that represents a sequence of non-polymorphic nucleotide letters.
current_string.append(letter)
#This is in case the last letter is a normal base.
if i == len(oligo_list) - 1:
#If it's after a branch.
if len(present_nodes) > 0:
for node in present_nodes:
node = node + str(current_node_num)
graph.setdefault(node, [])
graph[node].append("".join(current_string) + str(current_node_num + 1))
else:
                        graph["".join(current_string) + str(current_node_num + 1)] = []
elif letter in fastaTool.degeneratedna:
#present_nodes represents the node we're accessing. It's a list because each branch can be polymorphic in itself.
if len(present_nodes) > 0:
for node in present_nodes:
#Appends a number so identical sequences don't get assembled into nonsense or cyclic oligos.
node = node + str(current_node_num)
graph.setdefault(node, [])
#Builds the edges using the possible branches THESE branches connect to.
for base in fastaTool.degeneratedna[letter]:
current_string.append(base)
graph[node].append("".join(current_string) + str(current_node_num + 1))
current_string.pop()
present_nodes = []
#Builds the next node ready to be used - the first runthrough will just build the starting node, since it skips the above 'if'.
for base in fastaTool.degeneratedna[letter]:
current_string.append(base)
present_nodes.append("".join(current_string))
current_string.pop()
#Increments the ID number.
current_node_num += 1
current_string = []
return graph, current_node_num
#Adapted from https://www.python.org/doc/essays/graphs/.
def find_all_paths(graph, start, end, path = 0):
#First runthrough will instantiate the list, since you can't pass empty lists as defaults.
if path == 0:
path = []
#Adds the start to the empty list. Note that because this is recursive, the start won't be the beginning - just where this iteration starts.
path = path + [start]
#Checks if we reached the desired end.
if start == end:
return [path]
#Fails out if for some reason the start is nonsense.
if not start in graph:
return []
paths = []
#Goes through connecting nodes from the desired start.
for node in graph[start]:
#Checks it isn't going in circles.
if node not in path:
#Recursively calls with this node as the start.
newpaths = fastaTool.find_all_paths(graph, node, end, path)
#Once it finds all possible paths, drops them into a list like so.
for newpath in newpaths:
paths.append(newpath)
return paths
#Checks what type of code is attached to the fastaTool.
def validateType(self, code: str) -> dict:
        #Strip all whitespace from the code before checking letters.
        code = "".join(code.split())
#Defaults to DNA.
codedict = fastaTool.complimentarityDNA
DNAletters = ("G", "C", "A", "T")
Tcount = 0
for letter in code.upper():
if letter == "T":
Tcount += 1
if letter not in DNAletters:
#Returns RNA if there's Us but no Ts.
                if letter == "U" and Tcount == 0:
codedict = self.complimentarityRNA
continue
#Returns amino acid if the letter isn't RNA/DNA but IS amino acid.
elif letter in self.aminoacids.keys():
codedict = self.standard_codons
continue
#Returns none if the inputted code is gibberish.
else:
codedict = "None"
self._logger.error("Code type not valid.")
return codedict
    def NucleotideCount(self) -> tuple:
Acount = self.code.count("A")
Tcount = self.code.count("T")
Gcount = self.code.count("G")
Ccount = self.code.count("C")
return Acount, Tcount, Gcount, Ccount
@staticmethod
def transcribeDNA(DNA: str) -> str:
output = ("U" if letter == "T" else letter for letter in DNA)
return "".join(output)
@staticmethod
def reverseComplement(code: str, codetype: dict) -> str:
complement_letters = [codetype[letter] for letter in list(code)]
complement_letters.reverse()
return "".join(complement_letters)
def translateNuc(self) -> SeqRecord:
code = self.code
frame = self.frame
#This function relies on Blastx to work out the frame.
if frame == "N/A":
self._logger.error(f"{self.ID} has no frame value - suggest manual alignment.")
return
#Checks if the contig is backwards.
if frame < 0:
code = self.reverseComplement(code, self.codedict)
frame = abs(frame)
#Translates in frame.
if self.codedict == self.complimentarityDNA:
code = self.transcribeDNA(code)
#No point translating amino acids...
if self.codedict == fastaTool.aminoacids:
self._logger.error("Can't translate peptides!")
return
#Makes a list, then fills it with amino acids from codons as it moves along.
protein_letters = []
start = frame - 1
end = start + 3
while start <= (len(code) - 3):
codon = code[start:end]
protein_letters.append(fastaTool.standard_codons[codon])
start += 3
end += 3
#List to string.
self.transl_protein = "".join(protein_letters)
#Returns a SeqRecord of the protein with the same name as the original DNA.
return SeqRecord(self.transl_protein, self.ID)
#Slices a fasta into chunks of given size - first x or last x.
    @staticmethod
    def sliceFasta(fasta_path: str, length: int, last = False):
        new_fasta = []
        for seq in SeqIO.parse(fasta_path, "fasta"):
            #Doesn't slice it if it's shorter than the intended size.
            slice_len = min(length, len(seq))
            #Take the last slice_len bases if last is set, else the first.
            sliced = seq.seq[-slice_len:] if last else seq.seq[:slice_len]
            new_seq = SeqRecord(sliced, seq.id)
            new_fasta.append(new_seq)
        SeqIO.write(new_fasta, f"{fasta_path[:-6]}_sliced.fasta", "fasta")
def __init__(self, code: str, frame = 1, ID = "N/A"):
self.ID = ID
self.codedict = self.validateType(code)
self.code = code
self.frame = frame
class blastTool(seqHandler):
    #Header for outputted .csvs. The order matters, so don't rearrange it -
    #just append new fields to the end.
header = ("species", "query % coverage",
"% identity", "contig length",
"contig name", "NCBI accession",
"BLAST score", "Frame", "alignment",
"matched length")
__slots__ = ['filename', 'queries', 'blast_type', 'aln', 'query_count']
def __init__(self, handle: str):
self.filename = handle.split("/")[-1][:-4]
result_handle = open(handle)
self.queries = NCBIXML.parse(result_handle)
self.blast_type = "None"
self.aln = {}
self.query_count = 0
def __str__(self):
return f"{self.filename} : {len(self.aln)} hits over {self.query_count} contigs greater than minimum length bp."
def unpackBlastRecord(self) -> list:
records = []
for hit in self.aln.values():
contig_name = hit["contig name"]
if contig_name == "":
self._logger.warning(f"No alignment found for {hit}.")
continue
contig_id = contig_name.split(" ")[0]
current_fasta = [contig_id, hit["Frame"]]
records.append(current_fasta)
return records
def getAlignments(self, search_term: str, minlength: int, get_all = False):
for query in self.queries:
#Fetches the application type if it doesn't already know.
if self.blast_type == "None":
self.blast_type = query.application
if len(query.alignments) == 0 or query.query_length < minlength:
continue
self.query_count += 1
            to_check = len(query.alignments) if get_all else 1
for i in range(to_check):
alignment = query.alignments[i]
#Checks if there is a search term, and if that term is in the hit.
if search_term and search_term not in alignment.hit_def.upper():
continue
#Appends alignment to the aln dict. q1a1 = query 1, alignment 1.
self.aln[f"q{self.query_count}a{i+1}"] = self.getData(alignment, query)
@count_calls
def getData(self, alignment, query, hspno = 0) -> dict:
        def getpercentage(a, b):
            return (a / b) * 100
        #Assumes no frame unless the search type provides one.
        frame = "N/A"
hsp = alignment.hsps[hspno]
ungapped = hsp.align_length - hsp.gaps
coverage = getpercentage(ungapped,
query.query_length)
identity = getpercentage(hsp.identities,
hsp.align_length)
#These go by the formatting outputted by NCBI -
#the accession number is in the ID at different places.
if self.blast_type.endswith("N"):
splitnum = 3
elif self.blast_type.endswith("P"):
splitnum = 1
elif self.blast_type.endswith("X"):
splitnum = 1
#Adds the frame back in if it's a Blastx.
frame = hsp.frame
accession = alignment.hit_id.split("|")[splitnum]
db_type = alignment.hit_id.split("|")[0]
#Unused for now - gb is genbank, ref is refseq.
#Must be in the same order as the header.
        dict_values = (alignment.hit_def,
                       coverage,
                       identity,
                       query.query_length, query.query,
                       accession, hsp.score,
                       frame[0] if isinstance(frame, tuple) else frame,
                       hsp.query, ungapped)
return {title : dict_values[i] for i, title in enumerate(blastTool.header)}
#Outputs a csv with the header and the values.
def outputCSV(self, output: str, filename: str, add_text: str):
out_file = f"{output}/{filename}_{add_text}.csv"
with open(out_file,'w+', encoding='UTF8', newline='') as virus_csv:
csv_writer = csv.writer(virus_csv)
csv_writer.writerow(blastTool.header)
for hit in self.aln.values():
csv_writer.writerow(hit.values())
self._logger.info(f"csv written to {out_file}.")
class rmaTool():
def __str__(self):
outputstr = ""
for key in self._info_dict:
if self._sorted == "contig":
outputstr += f"\n {key} : Rank: {self._info_dict[key][0]} Assignment: {self._info_dict[key][1]}"
elif self._sorted == "virus":
outputstr += f"\n {key} : {self._info_dict[key]}"
else:
outputstr = "Unsorted, quitting."
break
return outputstr
#Amend to also run the root.
@staticmethod
def runRma2info(filename: str, addtxt = "_info") -> str:
file_no_extension = filename[:-5]
#Note: probably worth changing this to all output to a given directory. At the moment it drops it into the MEGAN directory for you.
outfile = f"{file_no_extension}{addtxt}.txt"
with open(outfile, "w") as output:
subprocess.run(["rma2info", "--in", filename, "-vo", "-n", "-r2c", "Taxonomy", "-r"], stdout = output)
return outfile
def __init__(self, filename: str, sortby: str):
funcdict = {"contig" : self.sortByContig,
"virus" : self.sortByVirus}
self.rma_txt = self.runRma2info(filename) if filename.endswith(".rma6") else filename
self._info_dict = funcdict[sortby]()
def unpackInfo(self):
return self._info_dict, self._sorted
def sortByContig(self) -> dict:
info_dict = {}
with open(self.rma_txt, 'r') as info:
for line in info.readlines():
contig_name, rank, *virus_name = line.split("\t")
virus_name = "_".join(virus_name).strip()
virus_name = virus_name.replace(" ", "_")
info_dict[contig_name] = [rank, virus_name]
self._sorted = "contig"
return info_dict
def sortByVirus(self) -> dict:
info_dict = defaultdict(list)
with open(self.rma_txt, 'r') as info:
for line in info.readlines():
contig_name, rank, virus_name = line.split("\t")
virus_name = virus_name.replace(" ", "_")
info_dict[virus_name].append(contig_name)
self._sorted = "virus"
return info_dict
|
Angua-Luggage
|
/Angua_Luggage-0.0.9.tar.gz/Angua_Luggage-0.0.9/Angua_Luggage/FastaKit.py
|
FastaKit.py
|
import argparse
import openpyxl as opxl
from FastaKit import seqHandler
def getVir(filename):
def checkOptions(checklist):
try:
for test in checklist:
if test["opt"] == currentVirusDict[test["key"]]:
pass
else:
return
allVirusDict[values[0]] = currentVirusDict
except TypeError:
print(f"Missing taxa on {values[0]}.")
checklist = [{"opt" : options.host,
"key" : "Host"}]
    if options.nuc:
        checklist.append({"opt" : nucdict[options.nuc],
                          "key" : "Genome composition"})
if options.family:
checklist.append({"opt" : options.family,
"key" : "Family"})
if options.genus:
checklist.append({"opt" : options.genus,
"key" : "Genus"})
allVirusDict = {}
wb = opxl.load_workbook(filename)
sheet = wb.active
for row in sheet.iter_rows(min_row = 2):
values = [data.value for data in row]
currentVirusDict = {"Realm" : values[2],
"Kingdom" : values[4],
"Class" : values[8],
"Order" : values[10],
"Family" : values[12],
"Genus" : values[14],
"Species" : values[16],
"Exemplar" : values[17],
"Virus name" : values[18],
"Abbreviations" : values[19],
"Isolate designation" : values[20],
"GENBANK accession" : values[21],
"Genome coverage" : values[22],
"Genome composition" : values[23],
"Host" : values[24]}
checkOptions(checklist)
print(f"Searching for {len(allVirusDict)} entries.")
return allVirusDict
def getFastas(virdict, handler):
id_list = [virus["GENBANK accession"] for virus in virdict.values()]
handler.fetchEntrezFastas(id_list = id_list, email = options.email, api = options.api, output = options.output)
def parseArguments():
parser = argparse.ArgumentParser(description = "Fetches a list of viruses from the ICTV formatted file.")
parser.add_argument("input",
help = "Input folder containing .xlsx files. Required.")
parser.add_argument("output",
help = "Output folder for the db. Required.")
parser.add_argument("email",
help = "Entrez email.")
parser.add_argument("-a", "--api",
help = "api_key for Entrez email. Allows 10 queries per second instead of 3")
parser.add_argument("-g", "--genus",
help = "Restricts db to a genus.")
parser.add_argument("-f", "--family",
help = "Restricts db to a family.")
parser.add_argument("-n", "--nuc",
help = "Restricts db to a nucleotide type, Baltimore classification.",
                        choices = ["dsdna",
                                   "ssrna+", "ssrnam", "ssrna",
                                   "ssdna+", "ssdnam", "ssdna", "ssdna+m"])
parser.add_argument("-ho", "--host",
help = "Restricts db to a host type. Default plant.",
choices = ["plants",
"algae",
"fungi",
"archaea",
"vertebrates",
"bacteria"],
#Finish filling this out.
default = "plants")
parser.add_argument("-db", "--blastdb",
help = "Construct Blastdb from nucleotide fasta.",
action = "store_true")
parser.add_argument("--dbname",
help = "Name of the resulting database.",
default = "db")
#Add toggle for exemplar or not. Store_true and exmplar = E etc.
return parser.parse_args()
options = parseArguments()
if not options.api:
options.api = False
nucdict = {"dsdna" : "dsDNA",
"ssrna+" : "ssRNA(+)",
"ssrnam" : "ssRNA(-)",
"ssrna" : "ssRNA",
"ssdna+" : "ssDNA(+)",
"ssdnam" : "ssDNA(-)",
"ssdna" : "ssDNA",
"ssdna+m" : "ssDNA(+/-)"}
handler = seqHandler(folder = options.input, folder_type = "ICTV_db")
toparse = handler.getFiles(file_end = ".xlsx")
virus_Dict = {}
for file in toparse:
virus_Dict.update(getVir(file))
getFastas(virus_Dict, handler)
if options.blastdb:
handler.makeBlastDb(options.output, options.dbname)
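#Example command line (hypothetical folder names), matching the arguments above:
#  python ICTVEntrez.py ictv_sheets/ ictv_db/ user@example.com -f Secoviridae -db --dbname plant_viruses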
|
Angua-Luggage
|
/Angua_Luggage-0.0.9.tar.gz/Angua_Luggage-0.0.9/Angua_Luggage/ICTVEntrez.py
|
ICTVEntrez.py
|
import argparse
import sys
from FastaKit import seqHandler
def parseArguments():
parser = argparse.ArgumentParser(description = "Runs 'text search'.")
parser.add_argument("file",
help = "Blast .xml file to search.")
parser.add_argument("output",
help = "Output folder.")
parser.add_argument("-a", "--all",
help = "Give all hits, not just the top hit for each query.",
action = "store_true")
parser.add_argument("-st", "--searchterm",
help = "Text to look for in the Blast output. Default VIRUS.",
default = "VIRUS")
    parser.add_argument("-ml", "--minlength",
                        help = "Minimum contig length to check. Default 200.",
                        type = int, default = 200)
parser.add_argument("-csv", "--outputcsv",
help = "Output findings as a .csv file.",
action = "store_true")
#Not yet implemented.
parser.add_argument("-bl", "--blacklist",
help = "Term to exclude. Default 'retrovirus'.",
default = "retrovirus")
parser.add_argument("-c", "--contigs",
help = ".fasta file containing the contigs used for the Blast query.",
default = False)
return parser.parse_args()
def runTextSearch(search_term: str, in_file: str, output: str, get_all: bool, minlength: int, output_csv: str, contigs = False):
if not in_file.endswith(".xml"):
print("File needs to be in xml. Please check the Blast documentation for outputting this format.")
quit()
handler = seqHandler(xml = in_file)
handler.get_BlastRecord(search_term, minlength, get_all = get_all)
if contigs:
handler.addFasta(contigs)
handler.unpackBlastRecord()
handler.outputFasta(output, search_term)
if output_csv:
        handler.outputCSV(output, in_file.split("/")[-1][:-4], search_term)
seqHandler._logger.info("TextSearch complete.")
handler.outputFullLog(["Fastas", "Alignments", "CSVs"])
#Allows running as standalone if need be - it will unpack the arguments itself.
#We're looking for a way to avoid the repetition.
if __name__ == '__main__':
options = parseArguments()
options.searchterm = options.searchterm.upper()
sys.exit(runTextSearch(options.searchterm, options.file, options.output, options.all, options.minlength, options.outputcsv, options.contigs))
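#Example command line, matching the arguments above:
#  python TextSearch.py sample_blastx.xml results/ -st VIRUS -csv -c sample_contigs.fasta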
|
Angua-Luggage
|
/Angua_Luggage-0.0.9.tar.gz/Angua_Luggage-0.0.9/Angua_Luggage/TextSearch.py
|
TextSearch.py
|
# AniManga

AniManga is a Python module that scrapes the web for information on anime and manga (and hentai).
## Installation
```
python3 -m pip install AniManga
```
## Manga
- Get description, author, cover page and reviews of a Manga by name.
### Example:
```py
from AniManga import Manga
manga_description = Manga().get_manga_description("naruto")
print(manga_description)
```
Output:
```
'Once, the ninja village of Konohagakure was attacked by an evil nine-tailed fox spirit. This demon slaughtered many people until the leader of Konohagakure, the 4th Hokage, sacrificed his life to seal the fox inside a newborn child - Naruto Uzumaki. Now, twelve years later, Naruto is a member of the Ninja Academy; but due to his past Naruto is shunned by the rest of the village, and since he has no friends or family he plays the part of class idiot to get attention. However, he is determined to gain respect by becoming the next Hokage - the most powerful shinobi in the village. With his apparent lack of abilities, will Naruto be able to realize his goal through determination alone?'
```
Detailed docs available [here](https://github.com/centipede000/AniManga/tree/main/docs).
## Acknowledgements
### Sites scraped:
- [Anime-Planet](https://animeplanet.com)
- [nhentai](https://nhentai.net)
## Author(s):
<table>
<thead>
<tr>
<th align="center"><a href="https://github.com/centipede000"><img src="https://github.com/centipede000.png?size=115" width="115" style="max-width: 100%;"><br><sub>@centipede000</sub></a></th>
</tr>
</thead>
</table>
**[Development](https://github.com/centipede000/AniManga/tree/dev) branch receives updates almost daily.**
|
AniManga
|
/AniManga-0.1.1.tar.gz/AniManga-0.1.1/README.md
|
README.md
|
# AnilistPython
AniList Python library (anilist.co APIv2 wrapper) that allows you to **easily search up and retrieve anime, manga, animation studio, and character information.** The library is beginner-friendly while leaving more experienced developers free to work directly with the retrieved information, and it provides bot support.

## Version 0.1.1 Overview
This update moderately reworks the library's architecture for better efficiency and speed, and adds several new features. The main additions and alterations are listed below.
**New features**:
1. Anime search by genre, year, and/or average score (finally!)
2. Offline anime retrieval support for anime - BETA
3. Manga search support
4. Auto-setup feature that helps new Python programmers set up the required libraries automatically
Optimization and updates:
1. The lib now has its own prebuilt anime database!
2. Anime, manga, and character search functions have all been optimized, making searches faster!
3. Improved the deepsearch feature in `.get_anime()`.
4. Manual result selection is now a parameter instead of a separate function (see usage below).
## How to use?
**Step One:** Library Installation
``` python
pip install AnilistPython==0.1.1
```
**Step Two:** Instance Creation
```python
from AnilistPython import Anilist
anilist = Anilist()
```
**Step Three**: Usage
The AnilistPython library has been split into three distinct sections. Each section has a different set of functions used for retrieving data in that category. Please visit the full documentation for more info or skip to the General Function Overview section for usage.
- **Anime** - ([Documentation](https://github.com/ReZeroE/AnilistPython/wiki/Anime))
- **Manga** - ([Documentation](https://github.com/ReZeroE/AnilistPython/wiki/Manga))
- **Character** - ([Documentation](https://github.com/ReZeroE/AnilistPython/wiki/Character))
## General Function Overview
The following functions are supported by AnilistPython version 0.1.1. Only the default parameters are shown below. For more information, visit the [full documentation](https://github.com/ReZeroE/AnilistPython/wiki).
```python
# ANIME
anilist.get_anime("Owari no Seraph") # returns a dictionary containing info about owari no seraph
anilist.get_anime_with_id(126830) # returns a dictionary with Code Geass (ID:126830) info
anilist.get_anime_id("ReZero") # returns Re:Zero's ID on Anilist
anilist.print_anime_info("Madoka Magica") # prints all information regarding the anime Madoka Magica
# returns a list of anime with the given restrictions
anilist.search_anime(genre=['Action', 'Adventure', 'Drama'], year=[2016, 2019], score=range(80, 95))
#CHARACTER
anilist.get_character("Emilia") # returns a dictionary containing the info about Emilia-tan
anilist.get_character_with_id(13701) # returns a dictionary with Misaka Mikoto (ID:13701) info
anilist.get_character_id("Milim") # returns character Milim's ID on Anilist
anilist.print_anime_info("Kirito") # prints all information regarding the character Kirito
# MANGA
anilist.get_manga("Seraph of the End") # returns a dictionary containing info about seraph of the end
anilist.get_manga_with_id(113399) # returns a dictionary with Tearmoon (ID:113399) info
anilist.get_manga_id("Tearmoon Empire") # returns Tearmoon Empire's ID on Anilist (manga)
anilist.print_manga_info("Tensei Slime") # prints all information regarding the manga Tensei Slime
```
Note: The feature for manually selecting the top three search results in the terminal is now controlled by a parameter (`manual_select`) in .get functions. For more information, please visit the full documentation. A sample program that has manual select enabled would be:
```python
anilist.get_anime("Owari no Seraph", manual_select=True)
```
## Discord Bot Support
AnilistPython was originally designed to support anime-related Discord bot features, but over the course of its development, many of its features proved useful to programs other than Discord bots. That said, the current version of AnilistPython has further optimized its functions for bot support. From pre-formatted JSON upon data retrieval to offline database support (see full documentation), it can now be implemented in bots with ease.
The upcoming AnilistPython version 0.1.2 will provide functions to generate pre-formatted Discord embeds (anime, manga, and character embeds) as well as other features that make AnilistPython easy to use in bots.
Note: Please make sure that parameter `manual_select` has not been set to True in bot implementations. (False by default)
## Credits
Lead Developer: Kevin L. (ReZeroE)
Special thanks to AniList's ApiV2 GraphQL Dev team for making this possible.
|
AnilistPython
|
/AnilistPython-0.1.1.tar.gz/AnilistPython-0.1.1/README.md
|
README.md
|
import importlib
import grpc
from anilius.utils.singleton import Singleton
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
class Service(metaclass=Singleton):
_services = {}
@staticmethod
def _get_services_from_proto(proto_service_path):
proto_service_module = importlib.import_module(proto_service_path)
assert hasattr(
proto_service_module, "DESCRIPTOR"
        ), "Should be given a valid proto service path"
descriptor = getattr(proto_service_module, "DESCRIPTOR")
assert hasattr(
descriptor, "services_by_name"
        ), "Should be given a valid proto service path"
services = getattr(descriptor, "services_by_name")
explicit_services = list()
for service in services:
explicit_services.append(services[service])
return explicit_services
def _add_rpc_handler(self, service, rpc_method, controller):
rpc_handler = self._create_rpc_handler(rpc_method, controller)
assert (
service not in self._services
or rpc_method.name not in self._services[service]
        ), "{} rpc for {} service was already added".format(rpc_method.name, service)
if service in self._services:
self._services[service][rpc_method.name] = rpc_handler
else:
self._services[service] = {rpc_method.name: rpc_handler}
@staticmethod
def _create_controller_wrapper(controller, request_deserializer, response_serializer):
def wrapper(request, context):
return controller(request, context, request_deserializer, response_serializer).get_response()
return wrapper
def _create_rpc_handler(self, rpc_method, controller):
request_deserializer = self._create_reflection_from_method(
rpc_method.input_type
)
response_serializer = self._create_reflection_from_method(
rpc_method.output_type
)
handler = grpc.unary_unary_rpc_method_handler(
self._create_controller_wrapper(controller, request_deserializer, response_serializer),
request_deserializer=request_deserializer.FromString,
response_serializer=response_serializer.SerializeToString,
)
return handler
@staticmethod
def _convert_file_to_module(file_name):
return file_name.split(".")[0].replace("/", ".") + "_pb2"
def _create_reflection_from_method(self, descriptor):
reflection = _reflection.GeneratedProtocolMessageType(
descriptor.name,
(_message.Message,),
{
"DESCRIPTOR": descriptor,
"__module__": self._convert_file_to_module(descriptor.file.name),
},
)
_sym_db.RegisterMessage(reflection)
return reflection
def get_generic_handlers(self):
generic_handlers = []
for service in self._services:
generic_handler = grpc.method_handlers_generic_handler(
service, self._services[service]
)
generic_handlers.append(generic_handler)
return generic_handlers
def add_handlers_to_server(self, server):
generic_handlers = self.get_generic_handlers()
server.add_generic_rpc_handlers(generic_handlers)
def add(self, proto_service_path, rpc_method, controller):
services = self._get_services_from_proto(proto_service_path)
for service in services:
methods = getattr(service, "methods_by_name")
if rpc_method in methods:
self._add_rpc_handler(
service.full_name, methods[rpc_method], controller
)
break
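# Example wiring (proto module path and controller name are hypothetical):
#   from concurrent import futures
#   service = Service()
#   service.add("myapp.protos.user_pb2", "GetUser", GetUserController)
#   server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
#   service.add_handlers_to_server(server)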
|
Anilius
|
/Anilius-2.0.3rc1.tar.gz/Anilius-2.0.3rc1/src/anilius/core/service.py
|
service.py
|
from abc import ABC
import grpc
from anilius.core.permission import Permission
from anilius.core.serializer import Serializer
from anilius.core.serializer_field import SerializerField
from anilius.utils.jwt import decode_jwt
from jwt import InvalidAlgorithmError, InvalidSignatureError
class Controller(ABC):
permissions = ()
payload = {}
is_authorize = False
request_serializer = None
request_deserializer = None
response_serializer = None
response_deserializer = None
_serialized_data = None
meta = {}
def __init__(self, request, context, request_deserializer, response_serializer):
self.request_deserializer = request_deserializer
self.response_serializer = response_serializer
self.request = request
self.context = context
self.metadata = context.invocation_metadata()
self.parse()
def check_permissions(self):
for permission in self.permissions:
has_perm = permission.has_perm(self)
if not has_perm:
self.raise_permission()
break
def raise_permission(self):
self.context.set_code(grpc.StatusCode.PERMISSION_DENIED)
        self.context.set_details("You do not have permission to perform this action")
def parse(self):
self.meta = {}
self._serialized_data = self.get_request_serializer(self.request).to_dict()
for data in self.metadata:
self.meta[data.key] = data.value
for permission in self.permissions:
assert isinstance(
permission, Permission
), "permissions should be type of Permission"
authorization = self.meta.get("authorization", None)
if authorization is not None:
self.extract_payload(authorization)
else:
self.payload = {}
self.is_authorize = False
def extract_payload(self, authorization):
try:
self.payload = decode_jwt(authorization)
self.is_authorize = True
except (ValueError, InvalidAlgorithmError, InvalidSignatureError):
pass
def get_valid_data(self, key, default=None):
field = self._serialized_data.get(key, None)
if not isinstance(field, SerializerField):
return default
return field.get_value()
@property
def get_request_serializer(self):
return (
self.request_serializer
if self.request_serializer is not None
else Serializer
)
def get_response(self):
self.check_permissions()
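# Subclasses are expected to override get_response; a hypothetical example:
#   class EchoController(Controller):
#       def get_response(self):
#           super().get_response()
#           return self.response_serializer(text = self.get_valid_data("text", ""))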
|
Anilius
|
/Anilius-2.0.3rc1.tar.gz/Anilius-2.0.3rc1/src/anilius/core/controller.py
|
controller.py
|
from collections import OrderedDict
from anilius.core.serializer_field import SerializerField
from google.protobuf.message import Message
from google.protobuf.internal.containers import RepeatedScalarFieldContainer
class SerializerMetaclass(type):
"""
This metaclass sets a dictionary named `_declared_fields` on the class.
Any instances of `Field` included as attributes on either the class
or on any of its superclasses will be include in the
`_declared_fields` dictionary.
"""
@classmethod
def _get_declared_fields(mcs, bases, attrs):
fields = [
(field_name, attrs.pop(field_name))
for field_name, obj in list(attrs.items())
if isinstance(obj, SerializerField)
]
fields.sort(key=lambda x: x[1].get_creation_counter())
# Ensures a base class field doesn't override cls attrs, and maintains
# field precedence when inheriting multiple parents. e.g. if there is a
# class C(A, B), and A and B both define 'field', use 'field' from A.
known = set(attrs)
def visit(name):
known.add(name)
return name
base_fields = [
(visit(name), f)
for base in bases
if hasattr(base, "_declared_fields")
for name, f in getattr(base, "_declared_fields").items()
if name not in known
]
return OrderedDict(base_fields + fields)
def __new__(mcs, name, bases, attrs):
attrs["_declared_fields"] = mcs._get_declared_fields(bases, attrs)
return super().__new__(mcs, name, bases, attrs)
class Serializer(SerializerField, metaclass=SerializerMetaclass):
_declared_fields = None
def __init__(self, request):
super().__init__()
assert isinstance(request, Message), "Request should be type of Message"
for field in request.ListFields():
if field[0].name in self._declared_fields:
raw_value = getattr(request, field[0].name)
raw_value = self.extract_message(raw_value)
self._declared_fields[field[0].name].set_raw_value(raw_value)
    def extract_message(self, raw_value):
        if isinstance(raw_value, Message):
            # Recursively unpack nested messages into a plain dict.
            raw_dict = {}
            for field in raw_value.ListFields():
                field_value = getattr(raw_value, field[0].name)
                raw_dict[field[0].name] = self.extract_message(field_value)
            raw_value = raw_dict
        elif type(raw_value) is RepeatedScalarFieldContainer:
            # Unpack repeated scalar fields into a plain list.
            raw_value = [self.extract_message(element) for element in raw_value]
        return raw_value
def validate(self):
return True
def get_value(self):
return self.to_dict()
def get_declared_fields(self):
return self._declared_fields
def to_dict(self):
return dict(self.get_declared_fields())
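# A hypothetical serializer declaration (field name invented for illustration):
#   class GreetSerializer(Serializer):
#       name = SerializerField()
# The metaclass collects `name` into _declared_fields in declaration order,
# and __init__ fills it from the matching field of the incoming Message.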
|
Anilius
|
/Anilius-2.0.3rc1.tar.gz/Anilius-2.0.3rc1/src/anilius/core/serializer.py
|
serializer.py
|
from anilius.cache.compressors.lz4 import Lz4Compressor
from anilius.cache.exceptions import CompressorError
from anilius.cache.serializers.msgpack import MSGPackSerializer
from anilius.redis.client import RedisClient
class Cache:
_compressor = Lz4Compressor()
_serializer = MSGPackSerializer()
def decode(self, value):
"""
Decode the given value.
"""
try:
value = int(value)
except (ValueError, TypeError):
try:
value = self._compressor.decompress(value)
except CompressorError:
# Handle little values, chosen to be not compressed
pass
value = self._serializer.loads(value)
return value
def encode(self, value):
"""
Encode the given value.
"""
if isinstance(value, bool) or not isinstance(value, int):
value = self._serializer.dumps(value)
value = self._compressor.compress(value)
return value
return value
@staticmethod
def set(*args, **kwargs):
with RedisClient as client:
return client.set(*args, **kwargs)
@staticmethod
def get(key, default=None, version=None, _client=None):
with RedisClient as client:
return client.get(key, default=default, version=version, _client=_client)
@staticmethod
def delete(*args, **kwargs):
with RedisClient as client:
return client.delete(*args, **kwargs)
@staticmethod
def incr(*args, **kwargs):
with RedisClient as client:
return client.incr(*args, **kwargs)
@staticmethod
def decr(*args, **kwargs):
with RedisClient as client:
return client.decr(*args, **kwargs)
@staticmethod
def keys(*args, **kwargs):
with RedisClient as client:
return client.keys(*args, **kwargs)
@staticmethod
def ttl(*args, **kwargs):
with RedisClient as client:
return client.ttl(*args, **kwargs)
@staticmethod
def persist(*args, **kwargs):
with RedisClient as client:
return client.persist(*args, **kwargs)
@staticmethod
def expire(*args, **kwargs):
with RedisClient as client:
return client.expire(*args, **kwargs)
@staticmethod
def lock(*args, **kwargs):
with RedisClient as client:
return client.lock(*args, **kwargs)
@staticmethod
def close(**kwargs):
with RedisClient as client:
client.close(**kwargs)
@staticmethod
def touch(*args, **kwargs):
with RedisClient as client:
return client.touch(*args, **kwargs)
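# Example round trip (ints are stored raw; everything else is msgpack-serialized
# and lz4-compressed, with decode falling back for small uncompressed values):
#   cache = Cache()
#   blob = cache.encode({"views": 1})
#   assert cache.decode(blob) == {"views": 1}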
|
Anilius
|
/Anilius-2.0.3rc1.tar.gz/Anilius-2.0.3rc1/src/anilius/cache/cache.py
|
cache.py
|
# AnimalProfile
A python package for tagging animal experiments.
This package allows assigning tags and parameters to animals and experiments, to keep track of them and group similar conditions together.
Here is an example of a _profile_ file for an animal:

A similar file for each animal is created which consists of:
- __header__ parameters, which are labels for each animal.
such as its genetic background, its initial training method, etc.
In this example, 3 parameters are defined:
> "rewardType", "initialSpeed", and "option".
The "name" field is added automatically.
- __body__, seperated from the header by lack of a leading `#` character, consists of at least a _Sessions_ column and a _Tag_ column (added automatically).
The `_Sessions_ columns lists all the experiments, the _Tag_ column should include a descriptive and unique label for the experimental condition (e.g., _Early-Lesion-DLS_).
User can add more columns to the body (3 more columns in this example).
This package:
- detects new experiments, adds them to the _profile_ file.
- provides an interface for reading the profiles and grouping similar experiments for further analysis (see the sketch below).
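A minimal sketch of the intended workflow (the names below are hypothetical, for illustration only; see the linked instructions for the real API):
```python
# Hypothetical API, for illustration only - see doc/instructions.ipynb.
from AnimalProfile import Profile  # assumed entry point

profile = Profile("data/Rat123")          # read one animal's profile file
profile.update_sessions()                 # detect and append new experiments
sessions = profile.filter(Tag="Early-Lesion-DLS")  # group similar conditions
```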
# Data structure
Data must be structured in a [particular format](/doc/dataStructure.md).
# Basic usage
Instructions are [here](/doc/instructions.ipynb).
# Installation
## Requirements
|
AnimalProfile
|
/AnimalProfile-0.1.tar.gz/AnimalProfile-0.1/README.md
|
README.md
|
from bs4 import BeautifulSoup
import re
from animapy.helpers.common import functions
class nwanime(functions):
def getVideos(self, offset, items, parent, position):
episodes = None
# in case the result is lower than the desired offset returns None
if len(items) > offset:
metaData = self.__getMetadata(items[offset])
links = self.__getVideoLinks(metaData['link'])
episodes = self.createObject(metaData['title'], metaData['image'], links['normal'])
        if episodes is not None:
parent.setResult(episodes, position)
parent.count = parent.count + 1
    def getVideoFromLink(self, link):
        links = self.__getVideoLinks(link)
        return self.createObject(normal=links['normal'])
def getAnimesMetadata(self, items, quant):
if len(items) < quant:
quant = len(items)
result = []
# in case the result is lower than the desired offset returns None
for i in range(quant):
episodes = None
metaData = self.__getMetadata(items[i])
episode = self.createObject(metaData['title'], metaData['image'], link = metaData['link'])
            if episode is not None:
result.append(episode)
return result
def getSearchItems(self, anime, order):
# gets the correct URL
if order == 'date':
url = 'http://www.nwanime.com/search_result.php?&search_type=search_videos&search_id=' + anime + '&sort=adddate&search_key=&search_for=all&videoold=&ordertype=DESC'
else:
url = 'http://www.nwanime.com/search_result.php?&search_type=search_videos&search_id=' + anime + '&sort=title&search_key=&search_for=all&videoold=&ordertype=DESC'
content = self.calUrl(url)
soup = BeautifulSoup(content)
# returns all the items
return soup.findAll('div', { "class" : 'resultstats_large' })
def __getTitle(self, aTag):
return aTag.contents[0].encode('ascii','ignore')
def __getImage(self, item):
style = item.find('div')['style']
return re.findall('url\((.*?)\)', style)[0].encode('ascii','ignore')
def __getVideoLinks(self, link):
# calls to get the movie url
content = self.calUrl(link)
newSoup = BeautifulSoup(content)
normal = ''
if newSoup.find(id="embed_holder").iframe != None:
content = self.calUrl(newSoup.find(id="embed_holder").iframe.get('src'))
newSoup = BeautifulSoup(content)
scripts = newSoup.body.findAll('script')
if len(scripts) < 9:
data = scripts[2].contents[0]
                normal = re.search(r"'file':\s'([^']+)'", data).group(1)
else:
data = scripts[5].contents[0]
                normal = re.search(r'file:\s"([^"]+)"', data).group(1)
return {'normal': normal}
def __getMetadata(self, item):
aTag1 = item.a
        aTag2 = aTag1  # fallback in case no direct <a> child is found in the loop below
children = item.findChildren(recursive=False)
for child in children:
if child.name == 'a':
aTag2 = child
title = self.__getTitle(aTag2)
image = self.__getImage(aTag1)
link = aTag2.get('href')
return {'title': title, 'image': image, 'link': link}
|
Animapy
|
/Animapy-1.5.3.1.tar.gz/Animapy-1.5.3.1/animapy/sources/nwanime.py
|
nwanime.py
|
from bs4 import BeautifulSoup
import urllib2
from animapy.helpers.common import functions
class anitube(functions):
def getVideos(self, offset, items, parent, position):
episodes = None
        # if there are fewer results than the requested offset, episodes stays None
if len(items) > offset:
metaData = self.__getMetadata(items[offset])
links = self.__getVideoLinks(metaData['link'])
episodes = self.createObject(metaData['title'], metaData['image'], links['normal'], links['hd'])
if episodes != None:
parent.setResult(episodes, position)
parent.count = parent.count + 1
def getVideoFromLink(self, link):
links = self.__getVideoLinks(link)
episode = self.createObject(normal=links['normal'],hd=links['hd'])
return episode
def getAnimesMetadata(self, items, quant):
if len(items) < quant:
quant = len(items)
result = []
        # builds metadata objects for up to quant items
for i in range(quant):
episodes = None
metaData = self.__getMetadata(items[i])
episode = self.createObject(metaData['title'], metaData['image'], link = metaData['link'])
if episode != None:
result.append(episode)
return result
def getSearchItems(self, anime, order):
# gets the correct URL
if order == 'date':
url = 'http://www.anitube.se/search/basic/1/?sort=addate&search_type=&search_id=' + anime
else:
url = 'http://www.anitube.se/search/?search_id=' + anime
content = self.calUrl(url)
soup = BeautifulSoup(content)
# returns all the items
return soup.findAll('li', { "class" : 'mainList' })
def __getTitle(self, aTag):
return aTag.contents[0].encode('ascii','ignore')
def __getImage(self, item):
return item.find('img').get('src').encode('ascii','ignore')
def __getVideoLinks(self, link):
hd = ''
normal = ''
# calls to get the movie url
content = self.calUrl(link)
newSoup = BeautifulSoup(content)
data = newSoup.find(id="videoPlayer").findAll('script')[2].get('src')
response = urllib2.urlopen(data)
# loops throught the javascript lines to get the movie links
for line in response:
if ('cdn.anitu.be' in line) or ('vid.anitu.be' in line):
if '_hd' in line:
hd = line.rstrip()[9:-2]
else:
normal = line.rstrip()[9:-2]
return {'hd': hd, 'normal': normal}
def __getMetadata(self, item):
aTag = item.find('div', { "class" : 'videoTitle' }).a
title = self.__getTitle(aTag)
image = self.__getImage(item)
link = aTag.get('href')
return {'title': title, 'image': image, 'link': link.encode('ascii','ignore')}
|
Animapy
|
/Animapy-1.5.3.1.tar.gz/Animapy-1.5.3.1/animapy/sources/anitube.py
|
anitube.py
|
class NextStep( object ):
def __init__( self, clean = False ):
self.clean = clean
def __call__( self, steps ):
from animategv.animation import Step
steps.append( Step( None if self.clean else steps[ -1 ] ) )
class AddNode( object ):
def __init__( self, v ):
self.v = v
def __call__( self, steps ):
steps[ -1 ].V.add( self.v )
class HighlightNode( object ):
def __init__( self, v, color = 'red' ):
self.v = v
self.color = color
def __call__( self, steps ):
steps[ -1 ].V.add( self.v )
steps[ -1 ].hV[ self.v ] = self.color
class LabelNode( object ):
def __init__( self, v, label ):
self.v = v
self.label = label
def __call__( self, steps ):
steps[ -1 ].V.add( self.v )
steps[ -1 ].lV[ self.v ] = self.label
class UnlabelNode( object ):
def __init__( self, v ):
self.v = v
def __call__( self, steps ):
steps[ -1 ].V.add( self.v )
try:
del steps[ -1 ].lV[ self.v ]
except KeyError:
pass
class RemoveNode( object ):
def __init__( self, v ):
self.v = v
def __call__( self, steps ):
steps[ -1 ].V.discard( self.v )
try:
del steps[ -1 ].hV[ self.v ]
except KeyError:
pass
try:
del steps[ -1 ].lV[ self.v ]
except KeyError:
pass
        # drop edges incident to the removed node, along with their highlights and labels
        dE = set( e for e in steps[ -1 ].E if self.v in e )
        steps[ -1 ].E -= dE
        for e in list(steps[ -1 ].hE.keys()):
            if self.v in e:
                del steps[ -1 ].hE[ e ]
        for e in list(steps[ -1 ].lE.keys()):
            if self.v in e:
                del steps[ -1 ].lE[ e ]
class AddEdge( object ):
def __init__( self, u, v ):
self.u = u
self.v = v
def __call__( self, steps ):
steps[ -1 ].V.add( self.u )
steps[ -1 ].V.add( self.v )
steps[ -1 ].E.add( ( self.u, self.v ) )
class HighlightEdge( object ):
def __init__( self, u, v, color = 'red' ):
self.u = u
self.v = v
self.color = color
def __call__( self, steps ):
steps[ -1 ].V.add( self.u )
steps[ -1 ].V.add( self.v )
steps[ -1 ].E.add( ( self.u, self.v ) )
steps[ -1 ].hE[ ( self.u, self.v ) ] = self.color
class LabelEdge( object ):
def __init__( self, u, v, label ):
self.u = u
self.v = v
self.label_edge = label
def __call__( self, steps ):
steps[ -1 ].V.add( self.u )
steps[ -1 ].V.add( self.v )
steps[ -1 ].E.add( ( self.u, self.v ) )
steps[ -1 ].lE[ ( self.u, self.v ) ] = self.label_edge
class UnlabelEdge( object ):
def __init__( self, u, v ):
self.u = u
self.v = v
def __call__( self, steps ):
steps[ -1 ].V.add( self.u )
steps[ -1 ].V.add( self.v )
steps[ -1 ].E.add( ( self.u, self.v ) )
try:
del steps[ -1 ].lE[ ( self.u, self.v ) ]
except KeyError:
pass
class RemoveEdge( object ):
def __init__( self, u, v ):
self.u = u
self.v = v
def __call__( self, steps ):
steps[ -1 ].E.discard( ( self.u, self.v ) )
        try:
            del steps[ -1 ].hE[ ( self.u, self.v ) ]
        except KeyError:
            pass
        try:
            del steps[ -1 ].lE[ ( self.u, self.v ) ]
        except KeyError:
            pass
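# --- Usage sketch (illustrative addition, not part of the original module) ---
# Each action is a callable that mutates the last Step in a list of steps;
# Animation (in animation.py) applies them in sequence, e.g.:
#
#     steps = [ Step() ]                      # Step comes from animategv.animation
#     AddNode( 'a' )( steps )
#     HighlightNode( 'a', color = 'blue' )( steps )
#     NextStep()( steps )                     # snapshot: copies V/E/labels, resets highlights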
|
AnimateGraphViz
|
/AnimateGraphViz-1.1.1-py3-none-any.whl/animategv/action.py
|
action.py
|
from __future__ import absolute_import
from email.utils import quote
import shlex
from animategv import action
class ParseException( Exception ):
pass
class Step( object ):
def __init__( self, step = None ):
if step:
self.V = step.V.copy()
self.E = step.E.copy()
self.lV = step.lV.copy()
self.lE = step.lE.copy()
else:
self.V = set()
self.E = set()
self.lV = dict()
self.lE = dict()
self.hV = dict()
self.hE = dict()
def node_format( self, v ):
fmt = []
if v in self.lV:
fmt.append( 'label="{}"'.format( quote( str( self.lV[ v ] ) ) ) )
if v in self.hV:
fmt.append( 'color={}'.format( self.hV[ v ] ) )
elif v not in self.V:
fmt.append( 'style=invis' )
if fmt:
return '[{}]'.format( ', '.join( fmt ) )
return ''
def edge_format( self, e ):
fmt = []
if e in self.lE:
fmt.append('label="{}"'.format( quote( str( self.lE[ e ] ) ) ) )
if e in self.hE:
fmt.append('color={}'.format( self.hE[ e ] ) )
elif e not in self.E:
fmt.append('style=invis')
if fmt:
return '[{}]'.format( ', '.join( fmt ) )
return ''
def __repr__( self ):
        return '{{ V = {}, E = {}, hV = {}, hE = {}, lV = {}, lE = {} }}'.format( self.V, self.E, self.hV, self.hE, self.lV, self.lE )
class Animation( object ):
def __init__( self ):
self._actions = []
        # https://graphviz.org/doc/info/attrs.html#k:rankdir
        # "TB", "LR", "BT", "RL"
        self._rankdir = 'TB'
def next_step( self, clean = False ):
self._actions.append( action.NextStep( clean ) )
def add_node( self, v ):
self._actions.append( action.AddNode( v ) )
def highlight_node( self, v, color = 'red' ):
self._actions.append( action.HighlightNode( v, color = color ) )
def label_node( self, v, label ):
self._actions.append( action.LabelNode( v, label ) )
def unlabel_node( self, v ):
self._actions.append( action.UnlabelNode( v ) )
def remove_node( self, v ):
self._actions.append( action.RemoveNode( v ) )
def add_edge( self, u, v ):
self._actions.append( action.AddEdge( u, v ) )
def highlight_edge( self, u, v, color = 'red' ):
self._actions.append( action.HighlightEdge( u, v, color = color ) )
def label_edge( self, u, v, label ):
self._actions.append( action.LabelEdge( u, v, label ) )
def unlabel_edge( self, u, v ):
self._actions.append( action.UnlabelEdge( u, v ) )
def remove_edge( self, u, v ):
self._actions.append( action.RemoveEdge( u, v ) )
    def set_direction( self, direction ):
        self._rankdir = direction
def parse( self, lines ):
action2method = {
'ns' : self.next_step,
'an' : self.add_node,
'hn' : self.highlight_node,
'ln' : self.label_node,
'un' : self.unlabel_node,
'rn' : self.remove_node,
'ae' : self.add_edge,
'he' : self.highlight_edge,
'le' : self.label_edge,
'ue' : self.unlabel_edge,
're' : self.remove_edge,
}
for line in lines:
parts = shlex.split( line.strip(), True )
if not parts: continue
action, params = parts[ 0 ], parts[ 1: ]
try:
action2method[ action ]( *params )
except KeyError:
raise ParseException( 'unrecognized command: {}'.format( action ) )
except TypeError:
raise ParseException( 'wrong number of parameters: {}'.format( line.strip() ) )
return
def steps( self ):
steps = [ Step() ]
for action in self._actions:
action( steps )
return steps
def graphs( self ):
steps = self.steps()
V, E = set(), set()
for step in steps:
V |= step.V
E |= step.E
graphs = []
for n, s in enumerate( steps ):
graph = [ 'digraph G {' ]
graph.append('rankdir='+self._rankdir)
for v in V: graph.append( '"{}" {};'.format( quote( str( v ) ), s.node_format( v ) ) )
for e in E: graph.append( '"{}" -> "{}" {};'.format( quote( str( e[ 0 ] ) ), quote( str( e[ 1 ] ) ), s.edge_format( e ) ) )
graph.append( '}' )
graphs.append( '\n'.join( graph ) )
return graphs
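# --- Usage sketch (illustrative addition, not part of the original module) ---
# Builds a three-step animation and prints one DOT graph per step; every call
# below is a public method defined above.
if __name__ == '__main__':
    anim = Animation()
    anim.add_edge( 'a', 'b' )
    anim.next_step()
    anim.highlight_edge( 'a', 'b', color = 'blue' )
    anim.next_step()
    anim.remove_node( 'a' )
    for g in anim.graphs():
        print( g )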
|
AnimateGraphViz
|
/AnimateGraphViz-1.1.1-py3-none-any.whl/animategv/animation.py
|
animation.py
|
AnimazePy is a Python wrapper for the Animaze API, providing an easy-to-use interface to interact with the Animaze application.
With this module, developers can conveniently send commands and control various aspects of Animaze programmatically.
Key Features:
- Establish a WebSocket connection to the Animaze API
- Send commands to control avatars, behaviors, and animations
- Retrieve avatar information and icons
- Set camera field of view (fov) and start a broadcast
- Select idle animations, scenes, emotes, and special actions
- and more!
For more information, documentation, and usage examples, visit the GitHub repository: https://github.com/gitagogaming/AnimazePy/
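As a rough sketch of the kind of exchange the wrapper performs under the hood, here is a direct WebSocket round trip using the `websockets` library. The endpoint, port, and command shape below are assumptions for illustration only, not AnimazePy's documented API:
```python
# Hypothetical sketch only: the endpoint (ws://localhost:9000) and the
# JSON command name ("GetAvatars") are assumptions, not AnimazePy's API.
import asyncio
import json
import websockets

async def main():
    async with websockets.connect("ws://localhost:9000") as ws:
        await ws.send(json.dumps({"action": "GetAvatars"}))
        print(await ws.recv())

asyncio.run(main())
```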
|
AnimazePy
|
/AnimazePy-1.0.1.tar.gz/AnimazePy-1.0.1/README.md
|
README.md
|
__name__ = "AnimeBot"
__author__ = "Pirxcy"
__version__ = "3.0.0"
try:
# System imports.
from typing import Tuple, Any, Union
import random
import asyncio
import time
import itertools
import unicodedata
import ssl
import json
import time
import re
import aiohttp
import discord
import timeago
import sanic
import ssl
from discord.ext import commands, tasks
from discord.ext.commands import has_permissions
from discord.ext.commands import guild_only
from itertools import cycle
from sanic import Sanic
from sanic.response import text
# Assumed reconstruction: the except clause for the try above is missing in
# the packaged file; without one this module cannot even be parsed.
except ImportError as import_error:
    raise SystemExit(f'Missing dependency: {import_error}')
print("Bot Logging In...")
with open('config.json') as f:
data = json.load(f)
prefix1 =(data['prefix'])
TOKEN =(data['token'])
GAME_NAME = 'My Prefix is !' + '\n' * 400 + ' Enjoy'  # reconstructed: the packaged file wrapped one long literal of \n escapes; the exact count is unrecoverable
status = cycle(['!help', 'with Hiro 💖', 'On {self.bot.servers}'])
app = Sanic(__name__)
bot = commands.Bot(command_prefix=prefix1)
client = commands.Bot(command_prefix=prefix1)
bot.remove_command('help')
kalib = "AnimeBot"
@app.route('/')
async def hello_world(request):
    return text(f'Bot Ready As {kalib}')
@bot.command()
async def ride(ctx):
file = discord.File("daft.mp4")
await ctx.send(file=file)
@bot.command()
async def lucidcry(ctx):
file = discord.File("lucid.mp4")
await ctx.send(file=file)
@bot.command()
async def Ride(ctx):
file = discord.File("daft.mp4")
await ctx.send(file=file)
@bot.command()
async def Darling(ctx):
file = discord.File("darling.mp4")
await ctx.send(file=file)
@bot.command()
async def darling(ctx):
file = discord.File("darling.mp4")
await ctx.send(file=file)
@bot.event
async def on_ready():
ch_pr.start()
print(f'Bot is ready! Logged in as {bot.user.name} [{bot.user.id}]')
await app.create_server(host="0.0.0.0",port=8080, return_asyncio_server=True)
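# Presence rotator: every 10 seconds, pick a random status line and apply it.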
@tasks.loop(seconds=10)
async def ch_pr():
statuses = ["!help", f"with {len(bot.guilds)} Servers | !help", "with Hiro 💖"]
status = random.choice(statuses)
await bot.change_presence(activity=discord.Game(name=status))
@bot.command()
async def Help(ctx):
url = "https://pastebin.com/raw/YsVc4nN6"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'All Commands And What They DO',
description=text,
color=0xff0000
)
await ctx.send(embed=embed)
@bot.command()
async def help(ctx):
url = "https://pastebin.com/raw/YsVc4nN6"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'All Commands And What They DO',
description=text,
color=0xff0000
)
await ctx.send(embed=embed)
@bot.command()
async def Upcoming(ctx):
url = "https://pastebin.com/raw/Ksrreu80"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Upcoming Features',
description=text,
color=0xff0000
)
await ctx.send(embed=embed)
@bot.command()
async def upcoming(ctx):
url = "https://pastebin.com/raw/Ksrreu80"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Upcoming Features',
description=text,
color=0xff0000
)
await ctx.send(embed=embed)
@bot.command()
async def Whatsnew(ctx):
url = "https://pastebin.com/raw/6zp34aK0"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Newest Features',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def whatsnew(ctx):
url = "https://pastebin.com/raw/6zp34aK0"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Newest Features',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def nuke(ctx, amount=9999):
await ctx.channel.purge(limit=amount)
time.sleep(0.5)
responses_list = ['***Nuked*** __this channel__ :white_check_mark:\n"You are now...my darling!"\n「あなたは今...私の最愛の人です!」\nZero Two (Japanese: ゼロツー, Hepburn: Zero Tsū)\nhttps://i.kym-cdn.com/photos/images/original/001/332/861/61b.gif', '***Nuked*** __this channel__ :white_check_mark:\n"I feel the same! Zero Two, I love you too!"\n-Hiro\n https://imgur.com/nL0CSWm', '***Nuked*** __this channel__ :white_check_mark:\n“ Im not gonna run away, I never go back on my word! That’s my nindo: my ninja way.”\n-Naruto\nhttps://i.pinimg.com/originals/d8/7e/17/d87e1799ab54d12da50ebff62b6f584b.gif', '***Nuked*** __this channel__ :white_check_mark:\n“Pika, pika chu”\nhttps://i.giphy.com/media/h3XmHtJQLEMyk/giphy.webp', '***Nuked*** __this channel__ :white_check_mark:\n“ My friends were the first to accept me for who I am.”\n Naruto.\n https://i.pinimg.com/originals/04/c7/89/04c7897eaac3a6cc37aa9989366b9c18.gif', '***Nuked*** __this channel__ :white_check_mark:\nhttps://i.kym-cdn.com/photos/images/newsfeed/001/334/590/96c.gif', '***Nuked*** __this channel__ :white_check_mark:\nhttps://imgur.com/a/86qKhoz']
choice = random.choice(responses_list)
await ctx.send(f"{choice}")
if not ctx.author.guild_permissions.manage_channels:
await ctx.send("You don't have permission")
@bot.command()
async def Nuke(ctx, amount=9999):
await ctx.channel.purge(limit=amount)
time.sleep(0.5)
responses_list = ['***Nuked*** __this channel__ :white_check_mark:\n"You are now...my darling!"\n「あなたは今...私の最愛の人です!」\nZero Two (Japanese: ゼロツー, Hepburn: Zero Tsū)\nhttps://i.kym-cdn.com/photos/images/original/001/332/861/61b.gif', '***Nuked*** __this channel__ :white_check_mark:\n"I feel the same! Zero Two, I love you too!"\n-Hiro\n https://imgur.com/nL0CSWm', '***Nuked*** __this channel__ :white_check_mark:\n“ Im not gonna run away, I never go back on my word! That’s my nindo: my ninja way.”\n-Naruto\nhttps://i.pinimg.com/originals/d8/7e/17/d87e1799ab54d12da50ebff62b6f584b.gif', '***Nuked*** __this channel__ :white_check_mark:\n“Pika, pika chu”\nhttps://i.giphy.com/media/h3XmHtJQLEMyk/giphy.webp', '***Nuked*** __this channel__ :white_check_mark:\n“ My friends were the first to accept me for who I am.”\n Naruto.\n https://i.pinimg.com/originals/04/c7/89/04c7897eaac3a6cc37aa9989366b9c18.gif', '***Nuked*** __this channel__ :white_check_mark:\nhttps://i.kym-cdn.com/photos/images/newsfeed/001/334/590/96c.gif', '***Nuked*** __this channel__ :white_check_mark:\nhttps://imgur.com/a/86qKhoz']
choice = random.choice(responses_list)
await ctx.send(f"{choice}")
if not ctx.author.guild_permissions.manage_channels:
await ctx.send("You don't have permission")
@bot.command()
async def fotd(ctx):
url = "https://pastebin.com/raw/AycH0XVP"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Your Fact of The Day is',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def Fotd(ctx):
url = "https://pastebin.com/raw/AycH0XVP"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Your Fact of The Day is',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def qotd(ctx):
url = "https://pastebin.com/raw/Bgi68c8d"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Your Question of The Day is',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def Qotd(ctx):
url = "https://pastebin.com/raw/Bgi68c8d"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Your Question of The Day is',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def crash(ctx):
async with aiohttp.ClientSession() as session:
async with session.get("https://pastebin.com/raw/6k6dgRTq") as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(color=0xff0000)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(text)
@bot.command()
async def status(ctx):
async with aiohttp.ClientSession() as session:
async with session.get("https://pastebin.com/raw/0ze8ad4V") as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(color=0xff0000)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(text)
@bot.command()
async def ping(ctx):
t_1 = time.perf_counter()
await ctx.trigger_typing()
t_2 = time.perf_counter()
ms = round((t_2-t_1)*1000)
embed = discord.Embed(color=0xff0000)
embed.add_field(name=":ping_pong: Pong!", value = f"{ms}ms")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def Ping(ctx):
t_1 = time.perf_counter()
await ctx.trigger_typing()
t_2 = time.perf_counter()
ms = round((t_2-t_1)*1000)
embed = discord.Embed(color=0xff0000)
embed.add_field(name=":ping_pong: Pong!", value = f"{ms}ms")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def invite(ctx):
await ctx.send(f"{ctx.author.mention} Add This To Your Server For an Awesome Anime Experience https://discord.com/api/oauth2/authorize?client_id=770030131418234892&permissions=8&scope=bot")
@bot.command()
async def commands(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name}'s Commands List", value = "`ping` `help` `invite` `ban` `kick` `choose` `jointime` `nuke` `whatsnew` `upcoming` `crash` `qotd` `fotd` `commands` `ride`")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def Commands(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name}'s Commands List", value = "`ping` `help` `invite` `ban` `kick` `choose` `jointime` `nuke` `whatsnew` `upcoming` `crash` `qotd` `fotd` `commands` `ride`")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def credit(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name} was Coded/Made By", value = "Pirxcy and Gomashio")
embed.set_thumbnail(url='https://i.imgur.com/c1bumIE.gif')
embed.set_image(url='https://cdn.discordapp.com/attachments/769636434264850433/771057317529518100/tenor.gif')
embed.set_footer(text='Support Him By Joining The Discord Server - https://discord.gg/xHHv8aJ')
await ctx.send(embed=embed)
@bot.command()
async def credits(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name} was Coded/Made By", value = "<@733302490753269852> and Gomashio")
embed.set_thumbnail(url='https://i.imgur.com/c1bumIE.gif')
embed.set_image(url='https://i.imgur.com/c1bumIE.gif')
embed.set_footer(text='Support Him By Joining The Discord Server - https://discord.gg/xHHv8aJ')
await ctx.send(embed=embed)
@bot.command()
async def Credit(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name} was Coded/Made By", value = "<@733302490753269852>")
embed.set_thumbnail(url='https://i.imgur.com/c1bumIE.gif')
embed.set_author(name='Pirxcy')
embed.set_image(url='https://i.imgur.com/c1bumIE.gif')
embed.set_footer(text='Support Him By Joining The Discord Server - https://discord.gg/xHHv8aJ')
await ctx.send(embed=embed)
@bot.command()
async def Credits(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name} was Coded/Made By", value = "<@733302490753269852>")
embed.set_thumbnail(url='https://i.imgur.com/c1bumIE.gif')
embed.set_author(name='Pirxcy')
embed.set_image(url='https://i.imgur.com/c1bumIE.gif')
embed.set_footer(text='Support Him By Joining The Discord Server - https://discord.gg/xHHv8aJ')
await ctx.send(embed=embed)
@bot.command()
@has_permissions(ban_members=True)
async def ban(ctx, member: discord.Member, *, reason: str = None):
    if reason is None:
        reason = 'No Reason Provided'
    await member.ban(reason=reason)
    embed = discord.Embed(color=0x0000ff)
    embed.add_field(name=f"{member} has been banned by {ctx.message.author}", value = f"`{reason}`")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
if not ctx.author.guild_permissions.manage_channels:
await ctx.send("You don't have permission")
@bot.command()
@has_permissions(kick_members=True)
async def kick(ctx, member: discord.Member, *, reason: str = None):
    if reason is None:
        reason = 'No Reason Provided'
    await member.kick(reason=reason)
    embed = discord.Embed(color=0x0000ff)
    embed.add_field(name=f"{member} has been kicked by {ctx.message.author}", value = f"`{reason}`")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
if not ctx.author.guild_permissions.manage_channels:
await ctx.send("You don't have permission")
@bot.command()
async def choose(ctx, *choice: str):
await ctx.send(f"{ctx.message.author} - I choose **{choice}**!")
@bot.command()
@guild_only()
async def jointime(ctx, member: discord.Member = None):
if member == None:
member = ctx.message.author
await ctx.send(f'{member.name} joined at `{member.joined_at}`')
@bot.command(aliases=["8ball"])
async def eightball(ctx):
responses_list = ['Yes.', 'No.', 'Maybe.', 'Definitely', 'Not at all.', 'Ask me another time.']
choice = random.choice(responses_list)
embed = discord.Embed(color=0xFFFFFF)
embed.add_field(name=":8ball: 8Ball Says...", value=f'`{choice}`')
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command(aliases=["anime?"])
async def howanime(ctx):
responses_list = ['5% Anime (╯°□°)╯︵ ┻━┻', '50% Anime', '69% Anime', 'Your Fulltime Anime God ¯\_(ツ)_/¯', 'Not at all Anime...', '75% Anime']
choice = random.choice(responses_list)
embed = discord.Embed(color=0xFFFFFF)
embed.add_field(name="You Are...", value=f'`{choice}`')
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command(aliases=["howbig"])
async def pp(ctx):
responses_list = ['8=D', '8==D', '8===D', '8====D', '8=====D', '8=========D']
choice = random.choice(responses_list)
embed = discord.Embed(color=0xFFFFFF)
embed.add_field(name="Ur Size is... ", value=f'`{choice}`')
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
bot.run(TOKEN)
|
AnimeBotPackages
|
/AnimeBotPackages-3.0.0.tar.gz/AnimeBotPackages-3.0.0/AnimeBot/__init__.py
|
__init__.py
|
# README.md
<p align="center">曾几何时,打算看会动漫</p>
<p align="center">兴致勃勃,发现需要会员</p>
<p align="center">不负众望,找到免费网站</p>
<p align="center">网络原因,把我拒之门外</p>
<p align="center">惆怅万分,决定用爬虫改变现状</p>
<p align="center">离线播放,享受流畅的观看体验</p>
这是一款在Windows平台下基于轻量级框架<code>[**Ruia**](https://github.com/howie6879/ruia)</code>、专门爬取免费动漫的爬虫,底层使用<code>aiohttp</code>库,使用异步模式,大幅增加爬取及下载速度。可**离线播放**各大动漫,支持**命令行输入**,立志成为最实用,最轻量的动漫管理及下载助手
## ❓如何使用
1. 首先来到[这里](https://www.python.org/downloads)下载Python解释器, 要求Python3.8及以上版本,安装即可
2. 然后,打开命令提示符,输入 <code>pip install AnimeCrawler</code>
3. 其次,输入 <code>AnimeCrawler search -t "动漫标题"</code>,来搜索动漫
4. 最后,复制输出的下载命令,粘贴回车就可以下载啦
- 输入 <code>AnimeCrawler -h</code> 会有详细的说明
> 下载后的文件在您的视频文件夹里
如果您想体验最新的功能,请转到dev分支~
## 🚀我想帮忙
十分感谢您有这个想法。这个项目仍在青涩年华,总会有一些跌跌撞撞的时候,也许您的举手之劳,能造就更好的它
请使用Github Issue来提交bug或功能请求。这样有利于我了解您的需求,也更好的投入精力解决问题。力所能及的话,可以先提个Issue后拉个Pull Requests,那样再好不过了
> 有的时候在dev分支中,您的需求如一些bug(或feature)已解决(或已被实现),请确认后再提交Issue
## 📝TODO
- [x] 下载多集动漫
- [x] 支持命令行工具
- [ ] 支持检索动漫
- [ ] 可更换下载源
- [ ] 支持上传网盘
- [ ] <span style="text-decoration: line-through">甚至是GUI</span>
## 🧱从源码搭建
1. 点击[这里](https://github.com/Senvlin/AnimeCrawler/releases)找到最新版本,下载源码
2. 转到项目目录,使用 <code>pip install -r requirement.txt</code> 安装依赖库
3. 随后使用 <code>python -m AnimeCrawler download -t "动漫标题" -u "URL"</code> 运行即可
## ❗ 声明
此项目只因个人兴趣而开发,仅供学习交流使用,无任何商业用途
下载的资源均来自可搜索到的、各网站提供的公开引用资源,所有视频版权均归原作者及网站所有
您应该自行承担使用此项目有可能的风险,我不保证您下载的资源的安全性,合法性,公正性。网络信息良莠不齐,请自行甄别,谢谢
|
AnimeCrawler
|
/AnimeCrawler-0.2.0b0.tar.gz/AnimeCrawler-0.2.0b0/README.md
|
README.md
|
__name__ = "AnimeDiscord"
__author__ = "Pirxcy"
__version__ = "1.0.0"
try:
# System imports.
from typing import Tuple, Any, Union
import random
import asyncio
import time
import itertools
import unicodedata
import ssl
import json
import time
import re
import aiohttp
import discord
import timeago
import sanic
import ssl
from discord.ext import commands, tasks
from discord.ext.commands import has_permissions
from discord.ext.commands import guild_only
from itertools import cycle
from sanic import Sanic
from sanic.response import text
# Assumed reconstruction: the except clause for the try above is missing in
# the packaged file; without one this module cannot even be parsed.
except ImportError as import_error:
    raise SystemExit(f'Missing dependency: {import_error}')
print("Bot Logging In...")
with open('config.json') as f:
data = json.load(f)
prefix1 =(data['prefix'])
TOKEN =(data['token'])
GAME_NAME = 'My Prefix is !' + '\n' * 400 + ' Enjoy'  # reconstructed: the packaged file wrapped one long literal of \n escapes; the exact count is unrecoverable
status = cycle(['!help', 'with Hiro 💖', 'On {self.bot.servers}'])
app = Sanic(__name__)
bot = commands.Bot(command_prefix=prefix1)
client = commands.Bot(command_prefix=prefix1)
bot.remove_command('help')
kalib = "AnimeBot"
@app.route('/')
async def hello_world(request):
    return text(f'Bot Ready As {kalib}')
@bot.command()
async def ride(ctx):
file = discord.File("daft.mp4")
await ctx.send(file=file)
@bot.command()
async def lucidcry(ctx):
file = discord.File("lucid.mp4")
await ctx.send(file=file)
@bot.command()
async def Ride(ctx):
file = discord.File("daft.mp4")
await ctx.send(file=file)
@bot.command()
async def Darling(ctx):
file = discord.File("darling.mp4")
await ctx.send(file=file)
@bot.command()
async def darling(ctx):
file = discord.File("darling.mp4")
await ctx.send(file=file)
@bot.event
async def on_ready():
ch_pr.start()
print(f'Bot is ready! Logged in as {bot.user.name} [{bot.user.id}]')
await app.create_server(host="0.0.0.0",port=8080, return_asyncio_server=True)
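# Presence rotator: every 10 seconds, pick a random status line and apply it.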
@tasks.loop(seconds=10)
async def ch_pr():
statuses = ["!help", f"with {len(bot.guilds)} Servers | !help", "with Hiro 💖"]
status = random.choice(statuses)
await bot.change_presence(activity=discord.Game(name=status))
@bot.command()
async def Help(ctx):
url = "https://pastebin.com/raw/YsVc4nN6"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'All Commands And What They DO',
description=text,
color=0xff0000
)
await ctx.send(embed=embed)
@bot.command()
async def help(ctx):
url = "https://pastebin.com/raw/YsVc4nN6"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'All Commands And What They DO',
description=text,
color=0xff0000
)
await ctx.send(embed=embed)
@bot.command()
async def Upcoming(ctx):
url = "https://pastebin.com/raw/Ksrreu80"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Upcoming Features',
description=text,
color=0xff0000
)
await ctx.send(embed=embed)
@bot.command()
async def upcoming(ctx):
url = "https://pastebin.com/raw/Ksrreu80"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Upcoming Features',
description=text,
color=0xff0000
)
await ctx.send(embed=embed)
@bot.command()
async def Whatsnew(ctx):
url = "https://pastebin.com/raw/6zp34aK0"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Newest Features',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def whatsnew(ctx):
url = "https://pastebin.com/raw/6zp34aK0"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Newest Features',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def nuke(ctx, amount=9999):
await ctx.channel.purge(limit=amount)
time.sleep(0.5)
responses_list = ['***Nuked*** __this channel__ :white_check_mark:\n"You are now...my darling!"\n「あなたは今...私の最愛の人です!」\nZero Two (Japanese: ゼロツー, Hepburn: Zero Tsū)\nhttps://i.kym-cdn.com/photos/images/original/001/332/861/61b.gif', '***Nuked*** __this channel__ :white_check_mark:\n"I feel the same! Zero Two, I love you too!"\n-Hiro\n https://imgur.com/nL0CSWm', '***Nuked*** __this channel__ :white_check_mark:\n“ Im not gonna run away, I never go back on my word! That’s my nindo: my ninja way.”\n-Naruto\nhttps://i.pinimg.com/originals/d8/7e/17/d87e1799ab54d12da50ebff62b6f584b.gif', '***Nuked*** __this channel__ :white_check_mark:\n“Pika, pika chu”\nhttps://i.giphy.com/media/h3XmHtJQLEMyk/giphy.webp', '***Nuked*** __this channel__ :white_check_mark:\n“ My friends were the first to accept me for who I am.”\n Naruto.\n https://i.pinimg.com/originals/04/c7/89/04c7897eaac3a6cc37aa9989366b9c18.gif', '***Nuked*** __this channel__ :white_check_mark:\nhttps://i.kym-cdn.com/photos/images/newsfeed/001/334/590/96c.gif', '***Nuked*** __this channel__ :white_check_mark:\nhttps://imgur.com/a/86qKhoz']
choice = random.choice(responses_list)
await ctx.send(f"{choice}")
if not ctx.author.guild_permissions.manage_channels:
await ctx.send("You don't have permission")
@bot.command()
async def Nuke(ctx, amount=9999):
await ctx.channel.purge(limit=amount)
time.sleep(0.5)
responses_list = ['***Nuked*** __this channel__ :white_check_mark:\n"You are now...my darling!"\n「あなたは今...私の最愛の人です!」\nZero Two (Japanese: ゼロツー, Hepburn: Zero Tsū)\nhttps://i.kym-cdn.com/photos/images/original/001/332/861/61b.gif', '***Nuked*** __this channel__ :white_check_mark:\n"I feel the same! Zero Two, I love you too!"\n-Hiro\n https://imgur.com/nL0CSWm', '***Nuked*** __this channel__ :white_check_mark:\n“ Im not gonna run away, I never go back on my word! That’s my nindo: my ninja way.”\n-Naruto\nhttps://i.pinimg.com/originals/d8/7e/17/d87e1799ab54d12da50ebff62b6f584b.gif', '***Nuked*** __this channel__ :white_check_mark:\n“Pika, pika chu”\nhttps://i.giphy.com/media/h3XmHtJQLEMyk/giphy.webp', '***Nuked*** __this channel__ :white_check_mark:\n“ My friends were the first to accept me for who I am.”\n Naruto.\n https://i.pinimg.com/originals/04/c7/89/04c7897eaac3a6cc37aa9989366b9c18.gif', '***Nuked*** __this channel__ :white_check_mark:\nhttps://i.kym-cdn.com/photos/images/newsfeed/001/334/590/96c.gif', '***Nuked*** __this channel__ :white_check_mark:\nhttps://imgur.com/a/86qKhoz']
choice = random.choice(responses_list)
await ctx.send(f"{choice}")
if not ctx.author.guild_permissions.manage_channels:
await ctx.send("You don't have permission")
@bot.command()
async def fotd(ctx):
url = "https://pastebin.com/raw/AycH0XVP"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Your Fact of The Day is',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def Fotd(ctx):
url = "https://pastebin.com/raw/AycH0XVP"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Your Fact of The Day is',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def qotd(ctx):
url = "https://pastebin.com/raw/Bgi68c8d"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Your Question of The Day is',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def Qotd(ctx):
url = "https://pastebin.com/raw/Bgi68c8d"
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(
title=f'Your Question of The Day is',
description=text,
color=0xff0000
)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def crash(ctx):
async with aiohttp.ClientSession() as session:
async with session.get("https://pastebin.com/raw/6k6dgRTq") as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(color=0xff0000)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(text)
@bot.command()
async def status(ctx):
async with aiohttp.ClientSession() as session:
async with session.get("https://pastebin.com/raw/0ze8ad4V") as res:
text = await res.text(encoding="utf-8")
embed = discord.Embed(color=0xff0000)
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(text)
@bot.command()
async def ping(ctx):
t_1 = time.perf_counter()
await ctx.trigger_typing()
t_2 = time.perf_counter()
ms = round((t_2-t_1)*1000)
embed = discord.Embed(color=0xff0000)
embed.add_field(name=":ping_pong: Pong!", value = f"{ms}ms")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def Ping(ctx):
t_1 = time.perf_counter()
await ctx.trigger_typing()
t_2 = time.perf_counter()
ms = round((t_2-t_1)*1000)
embed = discord.Embed(color=0xff0000)
embed.add_field(name=":ping_pong: Pong!", value = f"{ms}ms")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def invite(ctx):
await ctx.send(f"{ctx.author.mention} Add This To Your Server For an Awesome Anime Experience https://discord.com/api/oauth2/authorize?client_id=770030131418234892&permissions=8&scope=bot")
@bot.command()
async def commands(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name}'s Commands List", value = "`ping` `help` `invite` `ban` `kick` `choose` `jointime` `nuke` `whatsnew` `upcoming` `crash` `qotd` `fotd` `commands` `ride`")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def Commands(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name}'s Commands List", value = "`ping` `help` `invite` `ban` `kick` `choose` `jointime` `nuke` `whatsnew` `upcoming` `crash` `qotd` `fotd` `commands` `ride`")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command()
async def credit(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name} was Coded/Made By", value = "Pirxcy and Gomashio")
embed.set_thumbnail(url='https://i.imgur.com/c1bumIE.gif')
embed.set_image(url='https://cdn.discordapp.com/attachments/769636434264850433/771057317529518100/tenor.gif')
embed.set_footer(text='Support Him By Joining The Discord Server - https://discord.gg/xHHv8aJ')
await ctx.send(embed=embed)
@bot.command()
async def credits(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name} was Coded/Made By", value = "<@733302490753269852> and Gomashio")
embed.set_thumbnail(url='https://i.imgur.com/c1bumIE.gif')
embed.set_image(url='https://i.imgur.com/c1bumIE.gif')
embed.set_footer(text='Support Him By Joining The Discord Server - https://discord.gg/xHHv8aJ')
await ctx.send(embed=embed)
@bot.command()
async def Credit(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name} was Coded/Made By", value = "<@733302490753269852>")
embed.set_thumbnail(url='https://i.imgur.com/c1bumIE.gif')
embed.set_author(name='Pirxcy')
embed.set_image(url='https://i.imgur.com/c1bumIE.gif')
embed.set_footer(text='Support Him By Joining The Discord Server - https://discord.gg/xHHv8aJ')
await ctx.send(embed=embed)
@bot.command()
async def Credits(ctx):
embed = discord.Embed(color=0x00ffff)
embed.add_field(name=f"{bot.user.name} was Coded/Made By", value = "<@733302490753269852>")
embed.set_thumbnail(url='https://i.imgur.com/c1bumIE.gif')
embed.set_author(name='Pirxcy')
embed.set_image(url='https://i.imgur.com/c1bumIE.gif')
embed.set_footer(text='Support Him By Joining The Discord Server - https://discord.gg/xHHv8aJ')
await ctx.send(embed=embed)
@bot.command()
@has_permissions(ban_members=True)
async def ban(ctx, member: discord.Member, *, reason: str = None):
    if reason is None:
        reason = 'No Reason Provided'
    await member.ban(reason=reason)
    embed = discord.Embed(color=0x0000ff)
    embed.add_field(name=f"{member} has been banned by {ctx.message.author}", value = f"`{reason}`")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
if not ctx.author.guild_permissions.manage_channels:
await ctx.send("You don't have permission")
@bot.command()
@has_permissions(kick_members=True)
async def kick(ctx, member: discord.Member, *, reason: str = None):
    if reason is None:
        reason = 'No Reason Provided'
    await member.kick(reason=reason)
    embed = discord.Embed(color=0x0000ff)
    embed.add_field(name=f"{member} has been kicked by {ctx.message.author}", value = f"`{reason}`")
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
if not ctx.author.guild_permissions.manage_channels:
await ctx.send("You don't have permission")
@bot.command()
async def choose(ctx, *choice: str):
await ctx.send(f"{ctx.message.author} - I choose **{choice}**!")
@bot.command()
@guild_only()
async def jointime(ctx, member: discord.Member = None):
if member == None:
member = ctx.message.author
await ctx.send(f'{member.name} joined at `{member.joined_at}`')
@bot.command(aliases=["8ball"])
async def eightball(ctx):
responses_list = ['Yes.', 'No.', 'Maybe.', 'Definitely', 'Not at all.', 'Ask me another time.']
choice = random.choice(responses_list)
embed = discord.Embed(color=0xFFFFFF)
embed.add_field(name=":8ball: 8Ball Says...", value=f'`{choice}`')
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command(aliases=["anime?"])
async def howanime(ctx):
responses_list = ['5% Anime (╯°□°)╯︵ ┻━┻', '50% Anime', '69% Anime', 'Your Fulltime Anime God ¯\_(ツ)_/¯', 'Not at all Anime...', '75% Anime']
choice = random.choice(responses_list)
embed = discord.Embed(color=0xFFFFFF)
embed.add_field(name="You Are...", value=f'`{choice}`')
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
@bot.command(aliases=["howbig"])
async def pp(ctx):
responses_list = ['8=D', '8==D', '8===D', '8====D', '8=====D', '8=========D']
choice = random.choice(responses_list)
embed = discord.Embed(color=0xFFFFFF)
embed.add_field(name="Ur Size is... ", value=f'`{choice}`')
embed.set_footer(text='AnimeBot By <@733302490753269852>')
await ctx.send(embed=embed)
bot.run(TOKEN)
|
AnimeDiscord
|
/AnimeDiscord-1.0.0.tar.gz/AnimeDiscord-1.0.0/AnimeBot/__init__.py
|
__init__.py
|
from requests import get
from random import choice
class Animages:
url1 = 'https://nekos.best/api/v1/'
url2 = 'https://api.waifu.pics/sfw/'
url3 = 'https://purrbot.site/api/img/sfw/'
@classmethod
def pict(anu, url: str) -> str:
re = get(url).json()
try:
return re['url']
        except KeyError:
return re['link']
@classmethod
def neko(anu):
"""Neko random images"""
nekoo = choice(['neko/img', 'neko/gif'])
nekoo = choice([anu.url3 + nekoo, 'https://api.waifu.pics/sfw/neko', anu.url1 + 'nekos'])
return anu.pict(nekoo)
@classmethod
def bite(anu):
"""Bite anime random images"""
bite = choice([anu.url1 + 'bite', anu.url2 + 'bite', anu.url3 + 'bite/gif'])
return anu.pict(bite)
@classmethod
def blush(anu):
"blush random image"
blush = choice([anu.url3 + 'blush/gif', anu.url1 + 'blush', anu.url2 + 'blush'])
return anu.pict(blush)
@classmethod
def waifu(anu):
return anu.pict(anu.url2 + 'waifu')
@classmethod
def baka(anu):
return anu.pict(anu.url1 + 'baka')
@classmethod
def bored(anu):
return anu.pict(anu.url1 + 'bored' )
@classmethod
def cry(anu):
cry = choice([anu.url3 + 'cry/gif', anu.url2 + 'cry', anu.url1 + 'cry'])
return anu.pict(cry)
@classmethod
def cuddle(anu):
cuddle = choice([anu.url1 + 'cuddle', anu.url2 + 'cuddle', anu.url3 + 'cuddle/gif'])
return anu.pict(cuddle)
@classmethod
def dance(anu):
dance = choice([anu.url1 + 'dance', anu.url2 + 'dance', anu.url3 + 'dance/gif'])
return anu.pict(dance)
@classmethod
def eevee(anu):
eevee = choice([anu.url3 + 'eevee/gif', anu.url3 + 'eevee/img'])
return anu.pict(eevee)
@classmethod
def feed(anu):
feed = choice([anu.url1 + 'feed', anu.url3 + 'feed/gif'])
return anu.pict(feed)
@classmethod
def fluff(anu):
return anu.pict(anu.url3 + 'fluff/gif')
@classmethod
def holo(anu):
return anu.pict(anu.url3 + 'holo/img')
@classmethod
def hug(anu):
hug = choice([anu.url1 + 'hug', anu.url2 + 'hug', anu.url3 + 'hug/gif'])
return anu.pict(hug)
@classmethod
def kiss(anu):
kiss = choice([anu.url1 + 'kiss', anu.url2 + 'kiss', anu.url3 + 'kiss/gif'])
return anu.pict(kiss)
@classmethod
def kitsune(anu):
return anu.pict(anu.url3 + 'kitsune/img')
@classmethod
def lick(anu):
lick = choice([anu.url1 + 'lick', anu.url2 + 'lick', anu.url3 + 'lick/gif'])
return anu.pict(lick)
@classmethod
def okami(anu):
return anu.pict(anu.url3 + 'okami/img')
@classmethod
def pat(anu):
pat = choice([anu.url1 + 'pat', anu.url2 + 'pat', anu.url3 + 'pat/gif'])
return anu.pict(pat)
@classmethod
def poke(anu):
        poke = choice([anu.url1 + 'poke', anu.url2 + 'poke', anu.url3 + 'poke/gif'])
return anu.pict(poke)
@classmethod
def megumin(anu):
return anu.pict(anu.url2 + 'megumin')
@classmethod
def shinobu(anu):
return anu.pict(anu.url2 + 'shinobu')
@classmethod
def senko(anu):
return anu.pict(anu.url3 + 'senko/img')
@classmethod
def slap(anu):
slap = choice([anu.url1 + 'slap', anu.url2 + 'slap', anu.url3 + 'slap/gif'])
return anu.pict(slap)
@classmethod
def smile(anu):
smile = choice([anu.url1 + 'smile', anu.url2 + 'smile', anu.url3 + 'smile/gif'])
return anu.pict(smile)
@classmethod
def tail(anu):
return anu.pict(anu.url3 + 'tail/gif')
@classmethod
def tickle(anu):
tickle = choice([anu.url1 + 'tickle', anu.url3 + 'tickle/gif'])
return anu.pict(tickle)
@classmethod
def facepalm(anu):
return anu.pict(anu.url1 + 'facepalm')
@classmethod
def happy(anu):
happy = choice([anu.url1 + 'happy', anu.url2 + 'happy'])
return anu.pict(happy)
@classmethod
def highfive(anu):
        return anu.pict(anu.url1 + 'highfive')
@classmethod
def wink(anu):
wink = choice([anu.url1 + 'wink', anu.url2 + 'wink'])
return anu.pict(wink)
@classmethod
def awoo(anu):
return anu.pict(anu.url2 + 'awoo')
@classmethod
def bonk(anu):
        return anu.pict(anu.url2 + 'bonk')
@classmethod
def smug(anu):
return anu.pict(choice([anu.url2 + 'smug', anu.url1 + 'smug']))
@classmethod
def handhold(anu):
return anu.pict(anu.url2 + 'handhold')
@classmethod
def kill(anu):
return anu.pict(anu.url2 + 'kill')
@classmethod
def cringe(anu):
return anu.pict(anu.url2 + 'cringe')
@classmethod
def wave(anu):
wave = choice([anu.url1 + 'wave', anu.url2 + 'wave'])
return anu.pict(wave)
@classmethod
def bully(anu):
return anu.pict(anu.url2 + 'bully')
@classmethod
def pout(anu):
return anu.pict(anu.url1 + 'pout')
@classmethod
def shrug(anu):
return anu.pict(anu.url1 + 'shrug')
@classmethod
def sleep(anu):
return anu.pict(anu.url1 + 'sleep')
@classmethod
def stare(anu):
return anu.pict(anu.url1 + 'stare')
@classmethod
def think(anu):
return anu.pict(anu.url1 + 'think')
@classmethod
def thumsup(anu):
        return anu.pict(anu.url1 + 'thumbsup')
@classmethod
def yeet(anu):
return anu.pict(anu.url2 + 'yeet')
@classmethod
def nom(anu):
return anu.pict(anu.url2 + 'nom')
@classmethod
def glomp(anu):
return anu.pict(anu.url2+ 'glomp')
@classmethod
def kick(anu):
return anu.pict(anu.url2 + 'kick')
class NSFW:
url1 = 'https://nekos.life/api/v2/img/'
url2 = 'https://api.waifu.pics/nsfw/'
url3 = 'https://purrbot.site/api/img/nsfw/'
@classmethod
def nsfw_pict(anu, url: str) -> str:
re = get(url).json()
try:
return re['url']
        except KeyError:
return re['link']
@classmethod
def anal(anu):
return anu.nsfw_pict(anu.url3 + 'anal/gif')
@classmethod
def blowjob(anu):
return anu.nsfw_pict(choice([anu.url2 + 'blowjob', anu.url3+ 'blowjob/gif']))
@classmethod
def cum(anu):
return anu.nsfw_pict(choice([anu.url3 + 'cum/gif', anu.url1 + 'cum']))
@classmethod
def waifu(anu):
return anu.nsfw_pict(anu.url2 + 'waifu')
@classmethod
def neko(anu):
return anu.nsfw_pict(choice([anu.url2 + 'neko', anu.url3 + 'neko/gif', anu.url3 + 'neko/img', anu.url1 + 'nsfw_neko_gif']))
@classmethod
def trap(anu):
return anu.nsfw_pict(anu.url2 + 'trap')
@classmethod
def yaoi(anu):
return anu.nsfw_pict(anu.url3 + 'yaoi/gif')
@classmethod
def yuri(anu):
return anu.nsfw_pict(anu.url3 + 'yuri/gif')
@classmethod
def random(anu):
return anu.nsfw_pict(anu.url3 + 'fuck/gif')
@classmethod
def solo(anu):
return anu.nsfw_pict(anu.url3 + 'solo/gif')
@classmethod
def pussylick(anu):
return anu.nsfw_pict(anu.url3 + 'pussylick/gif')
@classmethod
def solog(anu):
return anu.nsfw_pict(anu.url1 + 'solog')
@classmethod
def smallboobs(anu):
        return anu.nsfw_pict(anu.url1 + 'smallboobs')
@classmethod
def lewdkemo(anu):
return anu.nsfw_pict(anu.url1 + 'lewdkemo')
@classmethod
def woof(anu):
return anu.nsfw_pict(anu.url1 + 'woof')
@classmethod
def gasm(anu):
return anu.nsfw_pict(anu.url1 + 'gasm')
@classmethod
def hentai(anu):
return anu.nsfw_pict(anu.url1 + 'hentai')
@classmethod
def ero(anu):
return anu.nsfw_pict(anu.url1 + 'ero')
@classmethod
def hololewd(anu):
return anu.nsfw_pict(anu.url1 + 'hololewd')
@classmethod
def random_gif(anu):
return anu.nsfw_pict(anu.url1 + 'Random_hentai_gif')
@classmethod
def eroyuri(anu):
return anu.nsfw_pict(anu.url1 + 'eroyuri')
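# --- Usage sketch (illustrative addition, not part of the original module) ---
if __name__ == '__main__':
    # Each classmethod fetches a JSON payload from one of the public APIs
    # above and returns a direct image URL.
    print(Animages.neko())
    print(Animages.hug())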
|
Anipick
|
/Anipick-1.7.0-py3-none-any.whl/anipick/pict.py
|
pict.py
|
from rich.console import Console
import anipick
import argparse
import tabulate
def main():
parse = argparse.ArgumentParser()
parse.add_argument('-a', '--anime', metavar='title', help='get info of anime', nargs='+')
parse.add_argument('-m', '--manga', metavar='title', help='get manga info', nargs='+')
parse.add_argument('-ly', '--lyrics', metavar='keyword', help='get lyrics of the anime song', nargs='+')
parse.add_argument('-s', '--song', help='usage: False/True')
parse.add_argument('-tf', '--tablefmt', help='setting the table fmt')
    parse.add_argument('-q', '--quote', action='store_true', help='random anime quotes', required=False)
parse.add_argument('-c', '--char', help='get character info', nargs='+')
parse.add_argument('--author', action='version', version='Kenzawa/Babwa')
parse.add_argument('-V', '--version', action='version', version='1.7.0')
parse.add_argument('-nc', '--ncode', metavar='keyword', nargs='+', help='get ncode')
parse.add_argument('-sn', '--seasonal', action='store_true', help='get title anime in specified seasonal')
args = parse.parse_args()
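    # Example invocations (illustrative; assumes the installed console script is named `anipick`):
    #   anipick -a "one piece" -s True -tf grid
    #   anipick -q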
if args.anime:
        args.anime = ' '.join(args.anime)
title = anipick.Animegraphy(args.anime)
name = title.title
genre = title.genre
eps = title.eps
aired = title.aired
studio = title.studio
id = title.mal_id
broadcast = title.broadcast
adaptation = title.adaptation
duration = title.duration
nsfw = title.nsfw_scan()
score = title.score
rank = title.rank
sequel = title.sequel
status = title.status
rated = title.rated
try:
op = title.op_song
ed = title.ed_song
op = op.split('#')[0:3]
op = ', '.join(op)
ed = ed.split('#')[0:3]
ed = ', '.join(ed)
        except Exception:
op = 'Opening Song Not Found'
ed = 'Ending Song Not Found'
table = [['Anime', name], ['MAL ID', id], ['Status', status], ['Episode', eps], ['Aired', aired], ['Studio', studio],['Broadcast', broadcast], ['Duration', duration], ['NSFW', nsfw], [
'Sequel', sequel], ['Score', score], ['RANK', rank], ['Genre', genre],['Adaptation', adaptation], ['Rated', rated]]
        if args.song and args.song.lower() == 'true':
            table.extend([['Opening', op], ['Ending', ed]])
if args.tablefmt:
print(tabulate.tabulate(table, headers='firstrow', tablefmt=args.tablefmt))
else:
print(tabulate.tabulate(table, headers='firstrow'))
if args.manga:
        manga = anipick.Mangaography(' '.join(args.manga))
title = f'{manga.name} ({manga.name_jp})'
aired = manga.aired
genre = manga.genres
author = manga.author
status = manga.status
publish = manga.is_publishing
serialization = manga.serialization
chapter = manga.chapter
volume = manga.volume
score = manga.score
id = manga.manga_id
related = manga.related
table = [['Manga', title], ['MAL ID', id], ['Score', score], ['Publish', publish], ['Author', author], ['Status', status],
['Chapter', chapter], ['Volume', volume], ['Genre', genre], ['Related', related], ['Serialization', serialization], ['Aired', aired]]
if args.tablefmt:
print(tabulate.tabulate(table, headers='firstrow', tablefmt=args.tablefmt))
else:
print(tabulate.tabulate(table, headers='firstrow'))
if args.quote:
quotenime = anipick.Quotenime()
anime = quotenime.anime
quote = quotenime.quote
char = quotenime.char
table = [['Anime', anime], ['Character', char], ['Quotes', quote]]
if args.tablefmt:
print(tabulate.tabulate(table, headers='firstrow', tablefmt=args.tablefmt))
else:
print(tabulate.tabulate(table, headers='firstrow'))
if args.lyrics:
        query = anipick.Lyricspedia(' '.join(args.lyrics))
romaji = query.lyrics_romaji
print(query.name ,romaji, sep='\n')
if args.char:
        char = anipick.Charapedia(' '.join(args.char))
name = char.name
nickname = char.nickname
about = f'Height: {char.height}, Weight: {char.weight}, Birthday: {char.birthday}'
id = char.mal_char_id
animm = str(char.anime)
animm = animm.replace(',', ',:').split(':')[0:5]
animm = ', '.join(animm)
mangaa = str(char.manga)
mangaa = mangaa.replace(',', ',:').split(':')[0:5]
mangaa = ', '.join(mangaa)
table = [['Name', name], ['NickName', nickname], ['MAL CHAR ID', id], ['About:', about], ['Anime', animm], ['Manga', mangaa]]
if args.tablefmt:
print(tabulate.tabulate(table, headers='firstrow', tablefmt=args.tablefmt))
else:
print(tabulate.tabulate(table, headers='firstrow'))
if args.ncode:
        code = anipick.HGEN('tobat_yuk_bang', query=' '.join(args.ncode))
print(code.nuclear)
if args.seasonal:
resultt = anipick.Seasonal(limit=5)
table = [['Year', resultt.year], ['Season', resultt.season], ['anime', resultt.name]]
if args.tablefmt:
print(tabulate.tabulate(table, tablefmt=args.tablefmt))
else:
print(tabulate.tabulate(table))
if __name__ == '__main__':
main()
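# Example invocations (a sketch; assumes this main() is exposed as a console
# script named `anipick` -- the entry-point name is an assumption):
#
#     anipick -a one piece -s True
#     anipick -q -tf grid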
|
Anipick
|
/Anipick-1.7.0-py3-none-any.whl/anipick/isi.py
|
isi.py
|
import requests
from googlesearch import search
from bs4 import BeautifulSoup
from .error_handling import SearchNotWork, NoResultFound
class Charapedia:
def __init__(self, char: str):
char = ''.join(char)
        try:
            mal_char_id = search('site:myanimelist.net {} character info inurl:/character/'.format(char), num_results=0)
        except Exception:
            raise SearchNotWork('the search library is not working')
        try:
            mal_char_id = ''.join(mal_char_id).split('/')[4]
        except IndexError:
            raise NoResultFound('Character Not Found')
self.mal_char_id = mal_char_id
base_api = 'https://api.jikan.moe/v3/character/{}/'.format(self.mal_char_id)
r = requests.get(base_api)
result = r.json()
self.result = result
        # Character name
try:
name = result['name']
name = f'{name} ({result["name_kanji"]})'
except KeyError:
            raise NoResultFound(f'{char} is not an anime character, or the name is misspelled')
self.name = name or None
#url name
url = result['url']
self.url = url or None
#image url
image_url = result['image_url']
self.image_url = image_url or None
#about
        about = result['about'] or 'No biography written.'
        self.about = about
self.anu = self.about.split('\n')
#age
try:
age = self.anu[0].split('Age: ')[1]
except:
age = 'Age biography not written.'
self.age = age
#birthday
try:
try:
birthday = self.anu[1].split('Birthday: ')[1]
except:
birthday = self.anu[0].split('Birthday: ')[1]
except:
birthday = 'Birthday biography not written'
self.birthday = birthday
#height
try:
try:
height = self.anu[1].split('Height: ')[1]
except:
try:
height = self.anu[2].split('Height: ')[1]
except:
height = self.anu[3].split('Height:')[1]
except:
height = 'Height biography not written'
self.height = height
#weight
try:
try:
weight = self.anu[1].split('Weight: ')[1]
except:
try:
weight = self.anu[2].split('Weight: ')[1]
except:
weight = self.anu[3].split('Weight:')[1]
except:
            weight = 'Weight biography not written'
self.weight = weight
#nickname
nickname = result['nicknames']
nickname = ', '.join(nickname)
        if not nickname:
            nickname = 'None'
self.nickname = nickname
#anime reference
@property
    def anime(self) -> str:
anime = []
for nama in self.result['animeography']:
anime.append(nama['name'])
anime = ', '.join(anime)
return anime or None
#manga reference
@property
    def manga(self) -> str:
manga = []
for nama in self.result['mangaography']:
manga.append(nama['name'])
manga = ', '.join(manga)
return manga or None
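# Usage sketch (performs a live Google search plus a Jikan API lookup; the
# character name below is just an example):
#
#     char = Charapedia('mikasa ackerman')
#     print(char.name, char.birthday)
#     print(char.anime)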
|
Anipick
|
/Anipick-1.7.0-py3-none-any.whl/anipick/char.py
|
char.py
|
import requests
from googlesearch import search
from .error_handling import NoResultFound
from requests.exceptions import HTTPError
class Animegraphy:
def __init__(self, title: str):
title = ''.join(title)
try:
mal_id = search('site:myanimelist.net {} anime info inurl:anime/'.format(title), num_results=0)
except HTTPError:
raise HTTPError('Too many requests')
        try:
            mal_id = ''.join(mal_id).split('/')[4]
        except IndexError:
            raise NoResultFound('Anime not found')
self.mal_id = mal_id
url = f'https://api.jikan.moe/v3/anime/{self.mal_id}/'
r = requests.get(url)
resu = r.json()
self.resu = resu or None
#Anime Title
title = resu['title']
self.title = title or None
#anime url
anime_url = resu['url']
self.anime_url = anime_url or None
#image anime url
image_url = resu['image_url']
self.image_url = image_url or None
#rated anime, example = PG - 13
rated = resu['rating']
self.rated = rated or None
#type anime, example = TV
type_nime = resu['type']
self.type = type_nime or None
#score rating anime
score = resu['score']
self.score = score or None
#episode anime
eps = resu['episodes']
self.eps = eps or None
#opening song
op_song = resu['opening_themes']
op_song = '"'.join(op_song)
self.op_song = op_song or None
#ending song
ed_song = resu['ending_themes']
ed_song = '"'.join(ed_song)
self.ed_song = ed_song or None
#aired
aired = resu['aired']['string']
self.aired = aired or None
#ranking
ranknim = resu['rank']
self.rank = ranknim or None
#popularity
popularity = resu['popularity']
self.popularity = popularity or None
#synopsis
synopsis = resu['synopsis']
self.synopsis = synopsis or None
        # trailer url (convert the embed link into a watch link)
        trailer_url = resu['trailer_url']
        try:
            video_id = trailer_url.split('/')[4].split('?')[0]
            self.trailer_url = f'https://youtube.com/watch?v={video_id}'
        except (AttributeError, IndexError):
            self.trailer_url = None
#status anime
status = resu['status']
self.status = status or None
#premiered
premiered = resu['premiered']
self.premiered = premiered or None
#broadcast
broadcast = resu['broadcast']
self.broadcast = broadcast or None
#adaptation anime type
try:
adaptation_type = resu['related']['Adaptation'][0]['type']
        except (KeyError, IndexError):
adaptation_type = "This Anime Doesn't Have A Manga/Anime Adaptation"
self.adaptation_type = adaptation_type or None
#adaptation anime name
try:
adaptation_name = resu['related']['Adaptation'][0]['name']
        except (KeyError, IndexError):
adaptation_name = "This Anime Doesn't Have A Manga/Anime Adaptation"
self.adaptation_name = adaptation_name or None
#sequel name
try:
sequel_name = resu['related']['Sequel'][0]['name']
        except (KeyError, IndexError):
            sequel_name = "This Anime Doesn't Have An Anime Sequel"
self.sequel = sequel_name or None
#sequel url
try:
sequel_url = resu['related']['Sequel'][0]['url']
        except (KeyError, IndexError):
            sequel_url = "This Anime Doesn't Have An Anime Sequel"
self.sequel_url = sequel_url or None
#adaptation
adaptation = f' {adaptation_type}, {adaptation_name}'
self.adaptation = adaptation or None
#favorite
favorite = resu['favorites']
self.favorite = favorite or None
#duration
duration = resu['duration']
self.duration = duration or None
#source
source = resu['source']
self.source = source or None
#background
background = resu['background']
self.background = background or None
@property
def studio(self):
studio = []
for nama in self.resu['studios']:
studio.append(nama['name'])
studio = ', '.join(studio)
return studio or None
@property
    def genre(self) -> str:
genre = []
for nama in self.resu['genres']:
genre.append(nama['name'])
genre = ', '.join(genre)
return genre or None
@property
def producers(self):
producers = []
for nama in self.resu['producers']:
producers.append(nama['name'])
producers = ', '.join(producers)
return producers or None
@property
def recommend(self):
recommend = []
url = f'https://api.jikan.moe/v3/anime/{self.mal_id}/recommendations'
su = requests.get(url)
sult = su.json()
self.sult = sult
for nama in self.sult['recommendations']:
recommend.append(nama['title'])
recommend = recommend[:5]
recommend = '; '.join(recommend)
return recommend or None
def nsfw_scan(self) -> bool:
        return any(a in (self.rated or '') for a in ['R', '17'])
|
Anipick
|
/Anipick-1.7.0-py3-none-any.whl/anipick/animpedia.py
|
animpedia.py
|
import requests
from googlesearch import search
from bs4 import BeautifulSoup
from .error_handling import SearchNotWork, NoResultFound
class Mangaography:
def __init__(self, title: str):
        title = ''.join(title)
try:
manga_id = search(f'site:myanimelist.net {title} inurl:/manga/ manga info', num_results=0)
        except Exception:
raise SearchNotWork('Search Library not work/connection not found')
try:
manga_id = ''.join(manga_id).split('/')[4]
        except IndexError:
raise NoResultFound('No Result Found')
self.manga_id = manga_id or None
base_url = f'https://api.jikan.moe/v3/manga/{self.manga_id}'
url = requests.get(base_url)
result = url.json()
self.result = result
#manga name
name = result['title']
self.name = name or None
#manga en name
name_en = result['title_english']
self.name_en = name_en or None
#manga jp name
name_jp = result['title_japanese']
self.name_jp = name_jp
#anime url
manga_url = result['url']
self.manga_url = manga_url or None
#image anime url
image_url = result['image_url']
self.image_url = image_url or None
#type anime, example = TV
type = result['type']
self.type = type or None
#score rating anime
score = result['score']
self.score = score or None
#aired
aired = result['published']['string']
self.aired = aired or None
#ranking
ranking = result['rank']
self.ranking = ranking or None
#popularity
popularity = result['popularity']
self.popularity = popularity or None
#synopsis
synopsis = result['synopsis']
self.synopsis = synopsis or None
#publishing
is_publishing = result['publishing']
self.is_publishing = is_publishing or None
#volumes
volume = result['volumes']
self.volume = volume
#chapter
chapter = result['chapters']
self.chapter = chapter
#member
member = result['members']
self.member = member
#favorites
favorite = result['favorites']
self.favorite = favorite or None
#status
status = result['status']
self.status = status
#author
@property
    def author(self) -> str:
author = []
for nama in self.result['authors']:
author.append(nama['name'])
author = ', '.join(author)
return author or None
#genre
@property
    def genres(self) -> str:
genres = []
for nama in self.result['genres']:
genres.append(nama['name'])
genres = ', '.join(genres)
return genres or None
#related
@property
    def related(self) -> str:
related = []
try:
for nama in self.result['related']['Adaptation']:
related.append(nama['name'])
        except KeyError:
for nama in self.result['related']['Alternative version']:
related.append(nama['name'])
related = ', '.join(related)
return related or None
#serializations
@property
    def serialization(self) -> str:
serialization = []
for nama in self.result['serializations']:
serialization.append(nama['name'])
serialization = ', '.join(serialization)
return serialization or None
|
Anipick
|
/Anipick-1.7.0-py3-none-any.whl/anipick/manga.py
|
manga.py
|
class Genshinchar:
help = '''
endpoints:
character('name')
character('id')
character('slug')
character('description')
character('weapon')
character('obtain')
character('gender')
character('rarity')
character('birthday')
character('vision')
'''
def __init__(self, query: str):
char = [
{"id":6,"name":"Diluc","slug":"diluc","description":"The tycoon of a winery empire in Mondstadt, unmatched in every possible way.","gender":"male","birthday":"April 30th","rarity":5,"vision":"pyro","weapon":"claymore","obtain":"Wish"},
{"id":8,"name":"Jean","slug":"jean","description":"The righteous and rigorous Dandelion Knight, and Acting Grand Master of the Knights of Favonius in Mondstadt.","gender":"female","birthday":"March 14th","rarity":5,"vision":"anemo","weapon":"sword","obtain":"Wish"},
{"id":10,"name":"Keqing","slug":"keqing","description":"The Yuheng of the Liyue Qixing. Has much to say about Rex Lapis unilateral approach to policymaking in Liyue - but in truth, gods admire skeptics such as her quite a lot.","gender":"female","birthday":"Nov 20th","rarity":5,"vision":"electro","weapon":"sword","obtain":"Wish"},
{"id":11,"name":"Klee","slug":"klee","description":"An explosives expert and a regular at the Knights of Favonius\r\n confinement room. Also known as Fleeing Sunlight.","gender":"female","birthday":"Jul 27th","rarity":5,"vision":"pyro","weapon":"catalyst","obtain":"Wish"},
{"id":13,"name":"Mona","slug":"mona","description":"A mysterious young astrologer who proclaims herself to be Astrologist Mona Megistus, and who possesses abilities to match the title.","gender":"female","birthday":"Aug 31st","rarity":5,"vision":"hydro","weapon":"catalyst","obtain":"Wish"},
{"id":16,"name":"Qiqi","slug":"qiqi","description":"An apprentice and herb-picker Bubu Pharmacy. An undead with a bone-white complexion, she seldom has much in the way of words or emotion.","gender":"female","birthday":"March 3rd","rarity":5,"vision":"cryo","weapon":"sword","obtain":"Wish"},
{"id":21,"name":"Venti","slug":"venti","description":"One of the many bards of Mondstadt, who freely wanders the citys streets and alleys.","gender":"male","birthday":"Jun 16th","rarity":5,"vision":"anemo","weapon":"bow","obtain":"Wish"},
{"id":23,"name":"Xiao","slug":"xiao","description":"A yaksha adeptus that defends Liyue. Also heralded as the Conqueror of Demons or Vigilant Yaksha.","gender":"male","birthday":"April 17th","rarity":5,"vision":"anemo","weapon":"polearm","obtain":"Wish"},
{"id":26,"name":"Tartaglia","slug":"tartaglia","description":"Cunning Snezhnayan whose unpredictable personality keeps people guessing his every move.","gender":"male","birthday":"July 20th","rarity":5,"vision":"hydro","weapon":"bow","obtain":"Unknown"},
{"id":28,"name":"Zhongli","slug":"zhongli","description":"A mysterious guest invited by the Wangsheng Funeral Parlor. Extremely knowledgeable in all things.","gender":"male","birthday":"Dec 31st","rarity":5,"vision":"geo","weapon":"polearm","obtain":"Unknown"},
{"id":1,"name":"Amber","slug":"amber","description":"Always energetic and full of life, Ambers the best - albeit only - Outrider of the Knights of Favonius.","gender":"female","birthday":"Aug 24th","rarity":4,"vision":"pyro","weapon":"bow","obtain":"Quest"},
{"id":2,"name":"Barbara","slug":"barbara","description":"Every denizen of Mondstadt adores Barbara. However, she learned the word idol from a magazine.","gender":"female","birthday":"Jul 5th","rarity":4,"vision":"hydro","weapon":"catalyst","obtain":"Wish"},
{"id":3,"name":"Beidou","slug":"beidou","description":"Beidou is the leader of the Crux an armed fleet based in Liyue Harbor. An armed fleet means exactly what it sounds like: a fleet of ships armed to the teeth.","gender":"female","birthday":"Feb 14th","rarity":4,"vision":"electro","weapon":"claymore","obtain":"Wish"},
{"id":4,"name":"Bennett","slug":"bennett","description":"A righteous and good-natured adventurer from Mondstadt whos unfortunately extremely unlucky.","gender":"male","birthday":"Feb 29th","rarity":4,"vision":"pyro","weapon":"sword","obtain":"Wish"},
{"id":5,"name":"Chongyun","slug":"chongyun","description":"A young exortcist from a family of exorcists. He does everything he can to suppress his pure positive energy.","gender":"male","birthday":"Sep 7th","rarity":4,"vision":"cryo","weapon":"claymore","obtain":"Wish"},
{"id":7,"name":"Fischl","slug":"fischl","description":"A mysterious girl who calls herself Prinzessia der Verurteilung and travels with a night raven named Oz.","gender":"female","birthday":"May 27th","rarity":4,"vision":"electro","weapon":"bow","obtain":"Wish"},
{"id":9,"name":"Kaeya","slug":"kaeya","description":"A thinker in the Knights of Favonius with a somewhat exotic appearance.","gender":"male","birthday":"Nov 30th","rarity":4,"vision":"cryo","weapon":"sword","obtain":"Quest"},
{"id":12,"name":"Lisa","slug":"lisa","description":"The languid but knowledgeable Librarian of the Knights of Favonius who was deemed by Sumeru Academia to be their most distinguised graduate in the past two centuries.","gender":"female","birthday":"Jun 9th","rarity":4,"vision":"electro","weapon":"catalyst","obtain":"Quest"},
{"id":14,"name":"Ningguang","slug":"ningguang","description":"The Tianquan of Liyue Qixing. Her wealth is unsurpassed in all of Teyvat.","gender":"female","birthday":"Aug 26th","rarity":4,"vision":"geo","weapon":"catalyst","obtain":"Wish"},
{"id":15,"name":"Noelle","slug":"noelle","description":"A maid in the service of the Knights of Favonius that dreams of joining their ranks someday.","gender":"female","birthday":"March 21st","rarity":4,"vision":"geo","weapon":"claymore","obtain":"Wish"},
{"id":17,"name":"Razor","slug":"razor","description":"A boy who lives among the wolves in Wolvendom of Mondstadt, away from human civilization. As agile as lightning.","gender":"male","birthday":"Sep 9th","rarity":4,"vision":"electro","weapon":"claymore","obtain":"Wish"},
{"id":18,"name":"Sucrose","slug":"sucrose","description":"An alchemist filled with curiosity about all things. She researches bio-alchemy.","gender":"male","birthday":"May 11th","rarity":4,"vision":"anemo","weapon":"catalyst","obtain":"Wish"},
{"id":22,"name":"Xiangling","slug":"xiangling","description":"A renowned chef from Liyue. Shes extremely passionate about cooking and excels at making her signature hot and spicy dishes.","gender":"female","birthday":"Nov 2nd","rarity":4,"vision":"pyro","weapon":"polearm","obtain":"Wish"},
{"id":24,"name":"Xingqiu","slug":"xingqiu","description":"A young man carrying a longsword who is frequently seen at book booths. He has a chivalrous heart and yearns for justice and fairness for all.","gender":"male","birthday":"Oct 9th","rarity":4,"vision":"hydro","weapon":"sword","obtain":"Wish"},
{"id":25,"name":"Diona","slug":"diona","description":"A young lady who has inherited trace amounts of non-human blood. She is the incredible popular bartender of the Cats Tail tavern.","gender":"female","birthday":"Jan 18th","rarity":4,"vision":"cryo","weapon":"bow","obtain":"Unknown"},
{"id":27,"name":"Xinyan","slug":"xinyan","description":"Liyues sole rock 'n' roll musician. She rebels against ossified prejudices using her music and passionate singing.","gender":"female","birthday":"Nov 4th","rarity":4,"vision":"pyro","weapon":"claymore","obtain":"Unknown"},
{"id":28, "name":"Kamisato Ayaka", "slug":"ayaka", "description":"Daughter of the Yashiro Commission's Kamisato Clan. Dignified and elegant, as well as wise and strong.", "gender": "female", "birthday":"Sep 28th", "rarity":5, "vision":"Cryo", "weapon":"sword", "obtain":"wish"},
{"id":29, "name":"Raiden Shogun", "slug":"baal", "description":"Her Excellency, the Almighty Narukami Ogosho, who promised the people of Inazuma an unchanging Eternity.", "gender":"female", "birthday":"Jun 2th", "rarity":5, "vision":"archon electro", "weapon":"lightning", "obtain":"wish"}
]
        try:
            # assign the requested field of every character to an attribute,
            # e.g. self.klee = char[3][query]
            names = ['diluc', 'jean', 'keqing', 'klee', 'mona', 'qiqi',
                     'venti', 'xiao', 'tartaglia', 'zhongli', 'amber',
                     'barbara', 'beidou', 'bennett', 'chongyun', 'fischl',
                     'kaeya', 'lisa', 'ningguang', 'noelle', 'razor',
                     'sucrose', 'xiangling', 'xingqiu', 'diona', 'xinyan',
                     'ayaka', 'baal']
            for attr, entry in zip(names, char):
                setattr(self, attr, entry[query] or None)
        except KeyError:
            raise KeyError("'{}' is not an endpoint; see the help, e.g. info = Genshinchar('id'); print(info.help)".format(query))
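# Usage sketch: one lookup pulls the requested field for every character.
#
#     visions = Genshinchar('vision')
#     print(visions.klee)       # 'pyro'
#     print(Genshinchar.help)   # lists the valid endpoints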
|
Anipick
|
/Anipick-1.7.0-py3-none-any.whl/anipick/genshin.py
|
genshin.py
|
# Anisearch
Anilist API module for Python. You only need to copy the Anilist folder into your own project.
### Executing program
* How to run the program
* Import module
```python
from Anisearch import Anilist
instance = Anilist()
```
From there you can get information from Anilist using their new GraphQL API.
To get data on a known ID.
```python
instance.get.anime(13601) # Return data on PSYCHO-PASS
instance.get.manga(64127) # Return data on Mahouka Koukou no Rettousei
instance.get.staff(113803) # Return data on Kantoku
instance.get.studio(7) # Return data on J.C. Staff
```
Searching is also supported.
```python
instance.search.anime("Sword") # Anime search results for Sword.
instance.search.manga("Sword") # Manga search results for Sword.
instance.search.character("Tsutsukakushi") # Character search results for Tsutsukakushi.
instance.search.staff("Kantoku") # Staff search results for Kantoku.
instance.search.studio("J.C. Staff") # Studio search result for J.C. Staff.
```
A note about the searching and getting:
```python
search(term, page = 1, perpage = 10)
get(item_id)
```
Pagination is done automatically in the API. By default, you'll get 10 results per page.
If you want more, just change the `perpage` value. `pageInfo` is always the first result in the returned data.
Pages start at 1; if you want another page, just replace `page` with the next number.
`query_string` sets which fields you want returned.
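For example, a larger second page of results could be requested like this (a sketch; the shape of the returned data follows AniList's GraphQL response):
```python
results = instance.search.anime("Sword", page=2, perpage=25)
# pageInfo comes first in the returned data, followed by the media entries
```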
### Customization
You can set your own settings as follows
```python
import logging
from Anisearch import Anilist
# for init instance
SETTINGS = {
'header': {
'Content-Type': 'application/json',
'User-Agent': 'Anisearch (github.com/MeGaNeKoS/Anisearch)',
'Accept': 'application/json'},
'api_url': 'https://graphql.anilist.co'
}
request_param = {} # this is for the requests lib parameters.
instance = Anilist(log_level=logging.INFO, settings = SETTINGS, request_param = request_param)
# for instance get/search parameters
retry = 10
instance.get.anime(13601, num_retries=retry) # default 10
```
### Todo
* Add more error handling when the API returns an error.
  - currently this is limited to HTTP 429 (too many requests). You can help me by providing a log when other errors occur.
|
Anisearch
|
/Anisearch-1.1.0.tar.gz/Anisearch-1.1.0/README.md
|
README.md
|
# AnisoCADO
[](https://github.com/AstarVienna/AnisoCADO/actions/workflows/tests.yml)
[](https://anisocado.readthedocs.io/en/latest/?badge=latest)
[](https://github.com/AstarVienna/AnisoCADO)
[](https://www.gnu.org/licenses/gpl-3.0)
A Python package to generate off-axis PSFs for the SCAO mode of the ELT.
Please note: this package is not finished yet! The code is fine, but the
documentation is lacking.
## Documentation
Apropos documentation: it can be found here:
[https://anisocado.readthedocs.io/en/latest/index.html](https://anisocado.readthedocs.io/en/latest/index.html)
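A minimal usage sketch (the class and attributes below are the ones used by the package's own helper scripts; the keyword values are illustrative):
```python
from anisocado import AnalyticalScaoPsf

psf = AnalyticalScaoPsf(N=512, wavelength=2.15)  # K band, 512x512 kernel
psf.shift_off_axis(10, 5)                        # move the PSF 10" x, 5" y off axis
print(psf.strehl_ratio)
```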
|
AnisoCADO
|
/AnisoCADO-0.3.0.tar.gz/AnisoCADO-0.3.0/README.md
|
README.md
|
import numpy as np
from . import pupil_utils
# ____ _____ _ ____ __ __ _____
# | _ \| ____| / \ | _ \| \/ | ____|
# | |_) | _| / _ \ | | | | |\/| | _|
# | _ <| |___ / ___ \| |_| | | | | |___
# |_| \_\_____/_/ \_\____/|_| |_|_____|
"""
Hello.
This file contains some useful functions related to PSF generation.
They are provided raw, so that you can insert them into your own classes
as desired.
The functions are commented to help you understand what's going on when
you need to modify them.
Don't start by reading this code, though.
Go right now to the file _anisocado.py.
It contains examples that will show you how to use the functions, what they do,
etc.
"""
def defineDmFrequencyArea(kx, ky, rotdegree, dactu=0.5403):
"""
<kx> : spatial frequency produced by the
function computeSpatialFreqArrays() (metres^-1)
<ky> : idem kx
<rotdegree> : rotation of M4 (degrees)
<dactu> : value of actuator pitch of M4 (metres)
The function returns the 2D domain of spatial frequencies which can be
impacted by M4, i.e. the M4 compensation domain.
    The underlying assumption is that M4 has a hexagonal actuator pattern,
    with an inter-actuator distance <dactu> set by default to 54.03 cm.
Result is returned with a frequency corner-centred representation.
Standalone example::
N = 512 # output will be 512x512
pixelSize = 4.2 # 4.2 mas
wavelength = 1.65e-6 # metres
kx, ky = computeSpatialFreqArrays(N, pixelSize, wavelength)
M4 = defineDmFrequencyArea(kx, ky, 0)
        plt.imshow(np.fft.fftshift(M4).T, origin='lower')
"""
# cut-off frequency
fc = (1./np.sqrt(3)) / dactu
# mask frequency definition
A = np.pi/3 # 60 degrees
A0 = rotdegree * np.pi / 180
msk = np.abs(np.cos(A0)*ky+np.sin(A0)*kx)<fc
msk = np.logical_and(msk, np.abs(np.cos(A+A0)*ky+np.sin(A+A0)*kx)<fc)
msk = np.logical_and(msk, np.abs(np.cos(2*A+A0)*ky+np.sin(2*A+A0)*kx)<fc)
k = np.sqrt(kx**2 + ky**2)
msk = np.logical_and(msk, k < (fc*1.115))
return msk
def computeSpatialFreqArrays(N, pixelSize, wavelength):
"""
<N> : size of the output image (NxN)
<pixelSize> : size of the pixels of the psf image (mas)
<wavelength> : wavelength (metres)
The function returns a tuple of spatial frequencies (m^-1) in X and Y
together with the pixel scale of these (the value will be useful later
on in other procedures).
Results of 2D arrays are returned with a Fourier corner-centred
representation.
Standalone example::
N = 512 # output will be 512x512
pixelSize = 4.2 # 4.2 mas
wavelength = 1.65e-6 # metres
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelength)
"""
# array of indices centred 'as expected' by Fourier frequencies, in 1D
k1d = np.fft.fftshift(np.arange(N) - (N//2))
# Proper scaling to transform indices in spatial frequencies.
# dX = pixelSize * 4.84813681109536e-09 # from mas to radians
dX = pixelSize * 4.84813681109536e-06 # from arcsec to radians
uk = dX / wavelength
k1d = k1d * uk # now this is a spatial freq in metres^-1
# now creating 2D arrays of spatial frequency
kx, ky = np.meshgrid(k1d, k1d, indexing='ij') # for convention [x,y]
return kx, ky, uk
def computeWiener(kx, ky, L0, r0):
"""
<kx> : spatial frequency produced by
the function computeSpatialFreqArrays() (metres^-1)
<ky> : idem kx
<L0> : value of the outer scale (metres)
<r0> : value of Fried parameter r0 (metres)
The function returns the 2D spectrum of the turbulence with an outer
scale L0. It is expressed in rad^2.m^2.
Result is returned with a frequency corner-centred representation.
Standalone example::
N = 512 # output will be 512x512
pixelSize = 4.2 # 4.2 mas
wavelength = 1.65e-6 # metres. H band.
L0 = 25. # 25 metres outer scale
r0 = 0.6 # r0 = 60cm in H band
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelength)
W = computeWiener(kx, ky, L0, r0)
"""
# computation of Wiener spectrum expressed in radians^2 (at the wavelength
# where r0(lambda) is expressed !)
Wiener = (kx**2 + ky**2 + 1./L0**2.)**(-11./6)
Wiener *= 0.0228956 * r0**(-5./3)
# frequency 0 set to 0. It's wrong anyway.
Wiener[0, 0] = 0.
return Wiener
def anisoplanaticSpectrum(Cn2h, layerAltitude, L0, offx, offy, wavelength,
kx, ky, Wiener, M4):
"""
<Cn2h> : list of normalised (np.sum(Cn2h)==1.0) strengh of
turbulence of each layer (no unit)
<layerAltitude> : list of altitudes of the turbulence layers (metres)
<L0> : value of the outer scale (metres)
<offx> : off-axis distance of the star along X axis (arcsec)
<offy> : idem along Y axis (arcsec)
<wavelength> : wavelength (metres)
<kx> : spatial frequency produced by
the function computeSpatialFreqArrays() (metres^-1)
<ky> : idem kx
<Wiener> : turbulent spectrum from function computeWiener()
<M4> : frequency domain of M4 (boolean) computed by the
function M4 = defineDmFrequencyArea(kx, ky, 0)
Computes the phase spectrum (spatial frequencies) contribution due to the
anisoplanatism in the AO compensation for a source located off-axis from
the SCAO guide star by some amount (offx, offy).
In input, the distribution of the turbulence in altitude is given.
Result is returned with a frequency corner-centred representation.
Example::
N = 512
pixelSize = 4.2
Cn2h = [0.3, 0.2, 0.2, 0.1]
layerAltitude = [0,1000,8000,15000]
L0 = 25.0
offx, offy = (34., 12.)
wavelength = 1.65e-6
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelength)
M4 = defineDmFrequencyArea(kx, ky, 0)
W = computeWiener(kx, ky, L0, 1.0)
f = anisoplanaticSpectrum(Cn2h, layerAltitude, L0, offx, offy,
wavelength, kx, ky, W, M4)
"""
# number of turbulent layers involved in that computation
nlayers = len(Cn2h)
# conversion arcsec to radians
RASC = 206264.8062471
# prepare memory alloc for transfer function of anisoplanatism
Haniso = np.zeros(kx.shape)
# loop over turbulent layers, summing transfer function of each layer
for i in range(nlayers):
dx = layerAltitude[i] * offx / RASC # shift in metres on the layer in X
dy = layerAltitude[i] * offy / RASC # idem, in Y
tmp = (2j*np.pi*dx)*kx + (2j*np.pi*dy)*ky
Haniso += Cn2h[i] * np.abs(1 - np.exp( tmp ))**2
# now applying the transfer function on the Wiener spectrum, only in the
# spatial frequency range of M4
Waniso = np.zeros(Haniso.shape)
Waniso[M4] = Haniso[M4] * Wiener[M4]
return Waniso
def fittingSpectrum(Wiener, M4):
"""
<Wiener> : turbulent spectrum from function computeWiener()
<M4> : frequency domain of M4 (boolean) computed by the
function M4 = defineDmFrequencyArea(kx, ky, 0)
Returns the spatial spectrum of the (so-called) fitting error, i.e. the
residual phase after a full, perfect, ideal, instantaneous, super-duper,
hyper-clean, theoretical compensation of M4. It is expressed in rad^2.m^2.
Result is returned with a frequency corner-centred representation.
Example::
N = 512
pixelSize = 4.2
L0 = 25.0
offx, offy = (34., 12.)
wavelength = 1.65e-6
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelength)
M4 = defineDmFrequencyArea(kx, ky, 0)
r0 = 0.6
W = computeWiener(kx, ky, L0, r0)
f = fittingSpectrum(W, M4)
"""
Wfit = Wiener.copy()
Wfit[M4] = 0.0 # M4 cancels whatever is in its compensation domain
return Wfit
def otherSpectrum(nmRms, M4, uk, wavelength):
"""
<nmRms> : number of nm rms
<M4> : frequency domain of M4 (boolean) computed by the
function M4 = defineDmFrequencyArea(kx, ky, 0)
<uk> : # size of the 'spatial frequency pixel' in m^-1
<wavelength> : wavelength (metres)
Example::
N = 512
pixelSize = 4.2
L0 = 25.0
offx, offy = (34., 12.)
wavelength = 1.65e-6
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelength)
M4 = defineDmFrequencyArea(kx, ky, 0)
nmRms = 250.
f = otherSpectrum(nmRms, M4, uk, wavelength)
"""
fact = 2 * np.pi * nmRms * 1e-9 / uk / wavelength
fact = fact**2
tot = np.sum(M4)
Wothers = np.zeros(M4.shape)
Wothers[M4] = fact / tot
return Wothers
def aliasingSpectrum(kx, ky, r0, L0, M4, dssp=0.4015):
"""
Example::
N = 512
pixelSize = 4.2
L0 = 25.0
r0 = 0.6
wavelength = 1.65e-6
rotdegree = 10.
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelength)
M4 = defineDmFrequencyArea(kx, ky, rotdegree)
W = aliasingSpectrum(kx, ky, r0, L0, M4)
"""
ke = 1.0 / dssp # computes the sampling spatial-frequency of the WFS
kxt = kx[M4]
kyt = ky[M4]
Wt = ((kxt-ke)**2 + kyt**2 + 1./L0**2.)**(-11./6)
Wt += ((kxt+ke)**2 + kyt**2 + 1./L0**2.)**(-11./6)
Wt += (kxt**2 + (kyt-ke)**2 + 1./L0**2.)**(-11./6)
Wt += (kxt**2 + (kyt+ke)**2 + 1./L0**2.)**(-11./6)
Wt *= 0.0228956 * r0**(-5./3)
Walias = np.zeros(M4.shape)
Walias[M4] = Wt
return Walias
def computeBpSpectrum(kx, ky, V, Fe, tret, gain, Wiener, M4):
"""
Example::
N = 512
pixelSize = 4.2
L0 = 25.0
r0 = 0.6
wavelength = 1.65e-6
rotdegree = 10.
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelength)
M4 = defineDmFrequencyArea(kx, ky, rotdegree)
W = computeWiener(kx, ky, L0, r0)
V = 10.
Fe = 500.
tret = 0.004
gain = 0.3
f = computeBpSpectrum(kx, ky, V, Fe, tret, gain, W, M4)
"""
k = np.sqrt(kx*kx + ky*ky)
    nu = k * V / np.sqrt(2)  # why the sqrt(2)? I honestly couldn't say ...!!!
Wbp = hcor(nu, Fe, tret, gain, 500) * Wiener
Wbp[np.logical_not(M4)] = 0.
return Wbp
def hcor(freq, Fe, tret, G, BP, an=True):
"""
    ***** Function extracted from STYC on 9 Jan 2013 *****
    The option an=True sets the integrator to "analog". Doing this, an
    extra 1/2 frame delay is added compared to the case of the numeric
    integrator (an=False). (Note: the current body always uses the numeric
    integrator; the <an> flag is accepted but not used.)
<tret> is the delay expressed as a *time in seconds*, between the
end of the integration and the start of the command.
"""
Te = 1. / Fe
p = 1j * 2 * np.pi * freq + 1e-12
Hint = 1./(1-np.exp(-p*Te)) # numeric integrator
    Hccd = (1.-np.exp(-p*Te))/(p*Te)  # sample-and-hold with a half-frame delay
    Hdac = Hccd                       # sample-and-hold with a half-frame delay
Hret = np.exp(-p*tret)
Hmir = 1./(1. + 1j*freq/BP)
Hbo = Hint * Hccd * Hdac * Hret * Hmir
Hcor = 1./abs(1 + Hbo*G)**2
return Hcor
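# Example (illustrative): rejection transfer function of the loop, sampled up
# to 250 Hz with the same parameters computeBpSpectrum() uses.
#
#     freq = np.linspace(0.1, 250., 1000)
#     H = hcor(freq, Fe=500., tret=0.004, G=0.3, BP=500)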
def convertSpectrum2Dphi(W, uk):
"""
<W> : spatial spectrum to be converted into phase structure function
in rd^2.m^2
<uk> : # size of the 'spatial frequency pixel' in m^-1
Converts the 2D spectrum into a phase structure function Dphi.
Uses Dphi(r) = $ $ (1-cos(2.pi.k.r)) W(k) d2k
Computation of Dphi is in radians^2 at the wavelength of r0.
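    Example::

        N = 512
        pixelSize = 4.2
        kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, 1.65e-6)
        W = computeWiener(kx, ky, 25., 0.6)
        Dphi = convertSpectrum2Dphi(W, uk)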
"""
W[0, 0] = 0.0
W[0, 0] = -np.sum(W)
Dphi = 2*np.abs(np.fft.fft2(W)) * (uk**2)
return Dphi
def fake_generatePupil(N, deadSegments, rotdegree, pixelSize, wavelength, rng=np.random.default_rng()):
"""
<N> : size of the output image, that is made to match the size
of the (square) psf image to be processed. In other
words, N = psf.shape[0]
<deadSegments> : number of hexa segments of M1 that are missing
<rotdegree> : pupil rotation in degrees
<pixelSize> : size of the pixels of the psf image (mas)
<wavelength> : wavelength (metres)
<rng> : optional random number generator for reproducible results
Examples::
N = 512
deadSegments = 3
rotdegree = 14.
pixelSize = 4.2
wavelength = 1.65e-6
pup = fake_generatePupil(N, deadSegments, rotdegree, pixelSize,
wavelength)
"""
nseg = pupil_utils.getEeltSegmentNumber()
refl = np.ones(nseg)+rng.standard_normal(nseg)/20.
if deadSegments:
refl[(rng.random(deadSegments)*nseg).astype(int)] = 0.
i0 = N/2+0.5
j0 = N/2+0.5
# field of view of the psf image in rd
FoV = N * pixelSize * 4.84813681109536e-06 # from arcsec to radians
# original line used mas
# FoV = N * pixelSize * 4.84813681109536e-09 # from mas to radians
# pixel scale of pupil image
pixscale = wavelength / FoV # expressed in metres
dspider = 0.53
gap = 0.02
pup = pupil_utils.generateEeltPupilReflectivity(refl, N, dspider, i0, j0,
pixscale, gap, rotdegree,
softGap=True)
return pup
def computeEeltOTF(pup):
"""
"""
# Computation of telescope OTF
Nx, Ny = pup.shape
FTOtel = np.fft.fft2( np.abs(np.fft.fft2(pup))**2 ).real
FTOtel /= np.sum(pup)**2 * Nx * Ny
return FTOtel
def core_generatePsf(Dphi, FTOtel):
"""
Examples
--------
::
N = 1024
pixelSize = 4.2
# atmospheric profile (old ESO profile before 2010)
layerAltitude = [47., 140, 281, 562, 1125, 2250, 4500, 9000, 18000.]
# from ref. E-SPE-ESO-276-0206_atmosphericparameters
Cn2h = [52.24, 2.6, 4.44, 11.60, 9.89, 2.95, 5.98, 4.30, 6]
Cn2h = np.array(Cn2h)
Cn2h /= np.sum(Cn2h)
# outer scale
L0 = 25.0
offx, offy = (15., 20.)
wavelength = 1.65e-6
rotdegree = 10.0
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelength)
M4 = defineDmFrequencyArea(kx, ky, rotdegree)
r0 = 0.6
# This is the turbulent spectrum ....
W = computeWiener(kx, ky, L0, r0)
# And here are some of the PSF-destroyers
Waniso = anisoplanaticSpectrum(Cn2h, layerAltitude, L0, offx, offy,
wavelength, kx, ky, W, M4)
Wfit = fittingSpectrum(W, M4)
nmRms = 100.
Wother = otherSpectrum(nmRms, M4, uk, wavelength)
Dphi = convertSpectrum2Dphi(Waniso + Wfit + Wother, uk)
# Here, i need to generate a kind of PSF or telescope OTF
deadSegments = 3
pup = fake_generatePupil(N, deadSegments, rotdegree, pixelSize,
wavelength)
FTOtel = computeEeltOTF(pup)
psf = core_generatePsf(Dphi, FTOtel)
print(psf.max())
window = 100
plt.imshow( psf[N//2-window:N//2+window, N//2-window:N//2+window]**0.3 )
"""
# total FTO
FTO = np.exp(-0.5*Dphi) * FTOtel
# PSF
psf = np.fft.fftshift( np.fft.fft2(FTO).real )
return psf
def createAdHocScaoPsf(N, pixelSize, wavelengthIR, rotdegree, r0Vis, nmRms):
"""
<N> : size of the output image, that is made to match the size
of the (square) psf image to be processed. In other
words, N = psf.shape[0]
<pixelSize> : size of the pixels of the psf image (mas)
<wavelengthIR> : IR wavelength (metres)
<rotdegree> : pupil rotation (degrees)
<r0Vis> : value of r0 in the visible (metres)
<nmRms> : number of nm rms affecting the psf (nm)
Do not use that function.
    It's just there to create a quick, random, shitty psf.
    It's there because I needed a random shitty psf.
    So I wrote it.
And it's still there.
Example::
N = 512
        pixelSize = 4.2  # mas
wavelengthIR = 1.65e-6 # metres
rotdegree = 10.
r0Vis = 0.12
nmRms = 150.
psf, pup = createAdHocScaoPsf(N, pixelSize, wavelengthIR, rotdegree,
r0Vis, nmRms)
"""
# let's compute r0 in the IR using the
# r0 chromatic translation formula
wavelengthVis = 500e-9
r0IR = r0Vis * (wavelengthIR / wavelengthVis)**(6/5.)
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelengthIR)
M4 = defineDmFrequencyArea(kx, ky, rotdegree)
# I hardcode an outer scale of 25 metres
L0 = 25.
# This is the turbulent spectrum ....
W = computeWiener(kx, ky, L0, r0IR)
# And here are some of the PSF-destroyers
Wfit = fittingSpectrum(W, M4)
Wother = otherSpectrum(nmRms, M4, uk, wavelengthIR)
Dphi = convertSpectrum2Dphi(Wfit + Wother, uk)
# Here, i need to generate a kind of PSF or telescope OTF
deadSegments = 3
pup = fake_generatePupil(N, deadSegments, rotdegree, pixelSize,
wavelengthIR)
FTOtel = computeEeltOTF(pup)
psf = core_generatePsf(Dphi, FTOtel)
return psf, pup
def r0Converter(r0, lambda1, lambda2):
"""
Converts a r0 defined at some wavelength lambda1,
into a r0' at lambda2.
    Lambda1 and lambda2 shall be in the SAME unit.
    The returned r0 will be in the same unit as the input one.
Example::
r0_K = r0Converter(0.12, 500, 2200)
"""
return r0 * (lambda2/lambda1)**(6/5.)
def airmassImpact(r0_at_zenith, zenith_distance):
"""
<r0_at_zenith> : r0 at zenith (any unit is ok)
<zenith_distance> : zenith distance (degrees)
The seeing/r0 actually perceived by a telescope depends on the zenith
distance. This function converts a r0 given at zenith into the real r0
actually observed by the telescope.
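    Example::

        r0_at_30deg = airmassImpact(0.12, 30.)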
"""
z = zenith_distance * np.pi / 180 # the same, in radians
r0 = r0_at_zenith * np.cos(z)**(3./5)
return r0
def get_atmospheric_turbulence(myProfile='EsoMedian'):
"""
Returns the relative level of turbulence at a given height
Note: The np.sum(Cn2h) = 1.0
    Turbulence profiles have been taken from ESO-258292: "Relevant Atmospheric
Parameters for E-ELT AO Analysis and Simulations".
    The following 3 turbulence profiles are currently available:
* ``EsoQ1``
Best atmospheric conditions - First Quartile Armazones atmospheric
turbulence profile
* ``EsoMedian`` [also ``officialEsoMedian``]
Median atmospheric conditions - Median Armazones atmospheric turbulence
profile
* ``EsoQ4``
        Worst atmospheric conditions - Fourth Quartile Armazones atmospheric
turbulence profile
Additional turbulence profiles include:
* ``oldEso``
        The old ESO profile from before 2010. Cn2h parameters are taken from
ref. E-SPE-ESO-276-0206_atmosphericparameters
* ``gendron``
The Gendron profile. Short, fast. Saves CPU. Carbon efficient.
The np.sum(Cn2h) = 1.0 is hyper-guaranteed here.
Parameters
----------
myProfile : str, optional
Profile name: ['EsoQ1', 'EsoMedian', 'EsoQ4', 'oldEso', 'gendron']
Returns
-------
layerAltitude : list of floats
[m] height of layer above ground
Cn2h : list of floats
Relative strength of turbulence
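    Examples
    --------
    ::

        layerAltitude, Cn2h = get_atmospheric_turbulence('EsoMedian')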
"""
layerAltitude, Cn2h = [], []
if myProfile == 'oldEso':
layerAltitude = [47., 140, 281, 562, 1125, 2250, 4500, 9000, 18000.]
Cn2h = [0.5224, 0.026, 0.0444, 0.116, 0.0989,
0.0295, 0.0598, 0.043, 0.06]
    elif myProfile in ('officialEsoMedian', 'EsoMedian'):
layerAltitude = [30, 90, 150, 200, 245, 300, 390, 600, 1130, 1880, 2630,
3500, 4500, 5500, 6500, 7500, 8500, 9500, 10500, 11500,
12500, 13500, 14500, 15500, 16500, 17500, 18500, 19500,
20500, 21500, 22500, 23500, 24500, 25500, 26500]
Cn2h = [24.2, 12, 9.68, 5.9, 4.73, 4.73, 4.73, 4.73, 3.99, 3.24, 1.62,
2.6, 1.56, 1.04, 1, 1.2, 0.4, 1.4, 1.3, 0.7, 1.6, 2.59, 1.9,
0.99, 0.62, 0.4, 0.25, 0.22, 0.19, 0.14, 0.11, 0.06, 0.09, 0.05,
0.04]
Cn2h = np.array(Cn2h)
Cn2h /= np.sum(Cn2h)
elif myProfile == 'EsoQ1':
layerAltitude = [30, 90, 150, 200, 245, 300, 390, 600, 1130, 1880, 2630,
3500, 4500, 5500, 6500, 7500, 8500, 9500, 10500, 11500,
12500, 13500, 14500, 15500, 16500, 17500, 18500, 19500,
20500, 21500, 22500, 23500, 24500, 25500, 26500]
Cn2h = [22.6, 11.2, 10.1, 6.4, 4.15, 4.15, 4.15, 4.15, 3.1, 2.26, 1.13,
2.21, 1.33, 0.88, 1.47, 1.77, 0.59, 2.06, 1.92, 1.03, 2.3, 3.75,
2.76, 1.43, 0.89, 0.58, 0.36, 0.31, 0.27, 0.2, 0.16, 0.09, 0.12,
0.07, 0.06]
Cn2h = np.array(Cn2h)
Cn2h /= np.sum(Cn2h)
elif myProfile == 'EsoQ4':
layerAltitude = [30, 90, 150, 200, 245, 300, 390, 600, 1130, 1880, 2630,
3500, 4500, 5500, 6500, 7500, 8500, 9500, 10500, 11500,
12500, 13500, 14500, 15500, 16500, 17500, 18500, 19500,
20500, 21500, 22500, 23500, 24500, 25500, 26500]
Cn2h = [23.6, 13.1, 9.81, 5.77, 6.58, 6.58, 6.58, 6.58, 5.4, 3.2, 1.6,
2.18, 1.31, 0.87, 0.37, 0.45, 0.15, 0.52, 0.49, 0.26, 0.8, 1.29,
0.95, 0.49, 0.31, 0.2, 0.12, 0.1, 0.09, 0.07, 0.06, 0.03, 0.05,
0.02, 0.02]
Cn2h = np.array(Cn2h)
Cn2h /= np.sum(Cn2h)
elif myProfile == 'gendron':
Cn2h = [1.0]
layerAltitude = [4414.]
return layerAltitude, Cn2h
def get_profile_defaults(myProfile="EsoMedian"):
"""
Data taken from ESO-258292
Parameters
----------
myProfile : str
        One of ['EsoMedian', 'EsoQ1', 'EsoQ4']; any other name falls back to the EsoMedian defaults.
Returns
-------
seeing : float
[arcsec]
zen_dist : float
[deg] Zenith distance
wind_alpha : float
Multiplication factor for the wind profile
"""
seeing = 0.67
zen_dist = 30
wind_alpha = 1.
if myProfile == 'EsoQ1':
seeing = 0.4
zen_dist = 0
wind_alpha = 0.88
elif myProfile == 'EsoQ4':
seeing = 1.0
zen_dist = 60
wind_alpha = 1.3
return seeing, zen_dist, wind_alpha
def clean_psf(psf, threshold):
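    """
    Zeroes every pixel below ``threshold``, then below the median value of
    the kernel edges, and renormalises the PSF to a total sum of 1.
    """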
psf[psf < threshold] = 0
edge_threshold = np.median([psf[:, 0], psf[0, :]])
psf[psf < edge_threshold] = 0
psf /= np.sum(psf)
return psf
def round_edges(kernel, edge_width=10):
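    """
    Applies a cosine fall-off to the outer ``edge_width`` pixels of a kernel
    so that it decays smoothly to zero at the borders.
    """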
n = edge_width
falloff = np.cos(1.5708 * np.arange(n) / (n-1)).reshape([1, n])
kernel[:n, :] *= falloff.T[::-1, :]
kernel[-n:, :] *= falloff.T
kernel[:, :n] *= falloff[:, ::-1]
kernel[:, -n:] *= falloff
return kernel
|
AnisoCADO
|
/AnisoCADO-0.3.0.tar.gz/AnisoCADO-0.3.0/anisocado/psf_utils.py
|
psf_utils.py
|
import numpy as np
from astropy.io import fits
from astropy.table import Table
from matplotlib import pyplot as plt
from matplotlib.colors import LogNorm
from anisocado import AnalyticalScaoPsf
def strehl_map(r=25, dr=3, **kwargs):
psf = AnalyticalScaoPsf(**kwargs)
    x, y = np.mgrid[-r:r+dr:dr, -r:r+dr:dr]
strlmap = np.zeros(x.shape)
for i in range(len(x)):
for j in range(len(y)):
psf.shift_off_axis(x[i, j], y[i, j])
strlmap[i, j] = psf.strehl_ratio
return strlmap
def on_axis_strehl_for_kernel_size(Narr=(128, 512, 2048), **kwargs):
"""Only for the on-axis kernel"""
return [AnalyticalScaoPsf(N=N, **kwargs).strehl_ratio for N in Narr]
def make_psf_grid(r=14, dr=7, **kwargs):
psf = AnalyticalScaoPsf(**kwargs)
x, y = np.mgrid[-r:r+1:dr, -r:r+1:dr]
psf_grid = []
for i in range(len(x)):
for j in range(len(y)):
psf.shift_off_axis(x[i, j], y[i, j])
psf_grid += [psf.kernel]
return psf_grid
def make_image_of_psf_grid(filt_name="Ks", wave=2.15, for_joss=True):
psf_grid = make_psf_grid(wavelength=wave, N=128)
plt.figure(figsize=(10, 10))
i = 0
for y in range(5):
for x in range(5):
plt.subplot(5, 5, 1+x+5*(4-y))
plt.imshow(psf_grid[i], origin="l", norm=LogNorm())
plt.axis("off")
plt.title("({}, {})".format((7*x-14), (7*x-14)))
i += 1
if for_joss:
plt.tight_layout()
path = "../docs/joss_paper/{}-band_psf_grid".format(filt_name)
plt.savefig(path+".png", format="png")
plt.savefig(path+".pdf", format="pdf")
else:
plt.suptitle("{}-band ({}um) SCAO PSFs".format(filt_name, wave))
# make_image_of_psf_grid("Ks", 2.15)
# make_image_of_psf_grid("H", 1.6)
# make_image_of_psf_grid("J", 1.2)
def make_simcado_psf_file(coords, wavelengths, header_cards=None, **kwargs):
"""
Generate a set of Field-Varying PSF cubes for use with SimCADO
Parameters
----------
coords : list of tuples
[arcsec] Sample positions of the PSF in field of view. (0, 0) is the
centre of the field of view.
wavelengths : list
[um] The wavelengths for which the PSF should be sampled
header_cards : dict, optional
Any extra keyword-value pair to be added to the extension 0 header.
kwargs
------
Keyword-value pairs accepted by an ``AnalyticalScaoPsf`` object
Returns
-------
hdulist : fits.HDUList
A HDUList object which is formatted for use as a Field-Varying PSF in
SimCADO
Examples
--------
::
import anisocado
radii = [1, 3, 5, 10] # arcsec
waves = [1.0, 1.2, 1.6, 2.15] # um
coords = anisocado.field_positions_for_simcado_psf(radii, theta=60)
hdu = anisocado.make_simcado_psf_file(coords=coords, wavelengths=waves,
N=512, profile_name="EsoQ1")
hdu.writeto("new_scao_psf_cube.fits")
"""
# accept list of coordinates
# accept list of wavelengths
# make ext0
# make table for ext1
# initialise PSF
# shift to each positions
# make hdus for each position
# build hdulist
# return hdulist
x, y = np.array(coords).T
layers = np.arange(len(x))
keys = ["AUTHOR", "DATE_CRE", "DATE_MOD", "SOURCE", "STATUS"]
ext0_dict = {key: "" for key in keys}
ext0_dict["ETYPE"] = "FVPSF"
ext0_dict["ECAT"] = (1, "The extension containing the catalogue data")
ext0_dict["EDATA"] = (2, "The first extension with real data")
ext0_dict.update({"WAVEEXT{}".format(i + 2): w
for i, w in enumerate(wavelengths)})
pri_hdr = fits.PrimaryHDU()
pri_hdr.header.update(ext0_dict)
    if header_cards:
        pri_hdr.header.update(header_cards)
ext1_dict = {"NUMPSFS": len(x), "CATTYPE": "table", "CUNIT1": "arcsec"}
tbl = Table(data=[x, y, layers], names=["x", "y", "layer"])
cat_hdu = fits.table_to_hdu(tbl)
cat_hdu.header.update(ext1_dict)
psf_hdus = []
for wave in wavelengths:
print("Making psf cube for {} um".format(wave))
psf = AnalyticalScaoPsf(wavelength=wave, **kwargs)
kernel_cube = [psf.shift_off_axis(dx, dy) for dx, dy in coords]
psf_hdu = psf.hdu
psf_hdu.data = np.array(kernel_cube)
psf_hdu.header["WAVE0"] = wave
psf_hdu.header["WAVEUNIT"] = "um"
psf_hdus += [psf_hdu]
hdulist = fits.HDUList([pri_hdr, cat_hdu] + psf_hdus)
return hdulist
def field_positions_for_simcado_psf(radii=None, theta=45):
"""
Generates a list of field position where the PSF will be sampled
The PSF will be sampled at intervals of ``theta`` around concentric circles
placed at distances ``radii`` from the centre of the field of view.
Default radii are at [1, 2, 4, 8, 16, 32] arcsec
Parameters
----------
radii : list of floats
[arcsec] Radii of concentric circles where the PSF will be sampled
theta : float
[deg] Spacing between PSF samples around each circle
Returns
-------
coords : list of tuples
[arcsec] List of sample positions relative to the centre of the field
Examples
--------
::
import anisocado
cds = anisocado.field_positions_for_simcado_psf(radii=[5, 10], theta=60)
hdu = anisocado.make_simcado_psf_file(cds, wavelengths=[1.2, 1.6, 2.2])
"""
coords = [(0, 0)]
if radii is None:
radii = [1, 2, 4, 8, 16, 32]
for r in radii:
for ang in np.arange(0, 360, theta):
coords += [(r * np.cos(np.deg2rad(ang)),
r * np.sin(np.deg2rad(ang)))]
return coords
def make_strehl_map_from_coords(coords):
    from scipy.interpolate import griddata
    x, y = np.array(coords).T
    smap = griddata((x, y), np.arange(len(x)),
                    np.array(np.meshgrid(np.arange(-25, 26),
                                         np.arange(-25, 26))).T,
                    method="nearest")
    return smap
|
AnisoCADO
|
/AnisoCADO-0.3.0.tar.gz/AnisoCADO-0.3.0/anisocado/misc.py
|
misc.py
|
import matplotlib.pyplot as plt
from anisocado.psf_utils import *
from anisocado.psf_utils import get_atmospheric_turbulence
# _ _ ____ _
# | | | |___ ___ / ___|__ _ ___ ___ / |
# | | | / __|/ _ \ | | / _` / __|/ _ \ | |
# | |_| \__ \ __/ | |__| (_| \__ \ __/ | |
# \___/|___/\___| \____\__,_|___/\___| |_|
def shift_scao_psf(plots=False):
"""
Problem:
You have an on-axis PSF.
You want to 'move' it off-axis, let's say (+15, +20) arcsec.
For that, you will need to know:
- the Cn2h profile
- the global r0
- the global L0
"""
###########
# Setup
# Let's take an example. I create a PSF, that could be coming from
# a SCAO simulation. It has 512x512 pixels, sampled with 4.2 mas in H band.
# Pupil is rotated by 10 deg. The r0 was 12cm.
#
# That psf will be the starting point.
N = 512
pixelSize = 4.2 # mas
wavelengthIR = 1.65e-6 # metres
rotdegree = 10. # deg
r0Vis = 0.12
nmRms = 150.
psf, pup = createAdHocScaoPsf(N, pixelSize, wavelengthIR, rotdegree, r0Vis,
nmRms)
if plots:
# I can even look at it:
        plt.imshow(psf.T, origin='lower')
print('Strehl ratio of initial psf is ', psf.max())
# OK. That's the starting point.......................
# Now I need to know the atmospheric properties, in particular the Cn2h
# profile. Let me offer you a little selection of atmospheric profiles.
profile_name = "gendron"
layerAltitude, Cn2h = get_atmospheric_turbulence(profile_name)
# Let us define the outer scale value. 25 metres is the Armazones median
# value from doc. ESO-258292.
    # Choose 15 m for a lucky observer, 50 m for the loser.
L0 = 25. # 25 metres is the Armazones median value from doc. ESO-258292.
# Now, the seeing.
# Here, we're cheating, we already know the r0 is 12cm because we've
# generated the PSF with this.
r0Vis = 0.12 # I know, we know it already ...
# I also need to know where's the off-axis star I want to simulate
# in arcsecs (this one is for those who'd like to check nothing will change
# at the end)
# offx, offy = (0, 0)
offx, offy = (0., 16.) # in arcsecs
####################
# Generate PSF
# Then let's start the work. I will create spatial frequency arrays.
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelengthIR)
# convert r0 in the infra-red
r0IR = r0Converter(r0Vis, 500e-9, wavelengthIR)
# and create M4 working domain
M4 = defineDmFrequencyArea(kx, ky, rotdegree)
# and finally the turbulent spectrum ....
W = computeWiener(kx, ky, L0, r0IR)
# and all this will be used to run that function below, that will compute
# the spatial spectrum of the phase due to anisoplanatism
Waniso = anisoplanaticSpectrum(Cn2h, layerAltitude, L0, offx, offy,
wavelengthIR, kx, ky, W, M4)
# Transforming this spectrum into a phase structure function
Dphi = convertSpectrum2Dphi(Waniso, uk)
# Here, the on-axis psf comes into play ... I take its Fourier transform
fto = np.fft.fft2(np.fft.fftshift(psf)) / N**2 # it's complex.
psf_aniso = core_generatePsf(Dphi, fto)
print('Strehl off-axis is', psf_aniso.max())
    plt.imshow(psf_aniso.T, origin='lower')
return psf_aniso
# _ _ ____ ____
# | | | |___ ___ / ___|__ _ ___ ___ |___ \
# | | | / __|/ _ \ | | / _` / __|/ _ \ __) |
# | |_| \__ \ __/ | |__| (_| \__ \ __/ / __/
# \___/|___/\___| \____\__,_|___/\___| |_____|
def exnihilo_scao_psf():
"""
You want to generate SCAO PSFs ex-nihilo, using a simple, approximative
simulation software.
You are aware that the simulated PSFs will be perfectly smooth (infinitely
converged), they do not reflect the fluctuations associated to short
exposures.
For this you need to know all the parameters of the simulation.
"""
###########
# Setup
# nb of pixels of the image to be simulated.
N = 1024
pixelSize = 3.1 # mas
# this is where you observe
wavelengthIR = 2.2e-6 # oh! a K band ...
# Now you need to tell what turbulence looks like.
# Here you can imagine to take those numbers into ESO doc, statistics,
# you can also include some dependence of r0 with respect to airmass
#
# If everything is as usual, you should end up with too many parameters that
# nobody knows about, and someone will tell you "ok it's very nice, but can
# you please simplify this ?"
layerAltitude = [47., 140, 281, 562, 1125, 2250, 4500, 9000, 18000.]
# from ref. E-SPE-ESO-276-0206_atmosphericparameters
Cn2h = [0.5224, 0.026, 0.0444, 0.116, 0.0989, 0.0295, 0.0598, 0.043, 0.06]
L0 = 25. # 25 metres is the Armazones median value
seeing = 0.8 # in arcseconds
r0Vis = 0.103 / seeing # r0Vis is in metres here, 0.103 is in metres.arcsec
r0IR = r0Converter(r0Vis, 500e-9, wavelengthIR) # convert r0 at 500nm to IR
# Just to use that wonderful function, i decide that the seeing given here
# was expressed at zenith, while our telescope observes at 30° from zenith.
# This will transform our r0 into the effective/actual one seen at 30°.
# In addition, the turbulent layers will all appear further away from the
# telescope, so that their apparent distance grows with airmass --> this is
# very bad for anisoplanatism !..
zenDist = 30. # I observe at 30° from zenith
r0IR = airmassImpact(r0IR, zenDist) # apparent seeing degrades with airmass
layerAltitude = np.array(layerAltitude)
layerAltitude *= 1/np.cos(zenDist*np.pi/180) # layers appear further away
# Also you may want to say something about how the pupil is rotated wrt
# your image
rotdegree = 10.0
# And you also need to generate the EELT pupil properly
deadSegments = 5 # there are some missing segments tonight !
pup = fake_generatePupil(N, deadSegments, rotdegree, pixelSize,
wavelengthIR)
# For temporal aspects you need to know the characteristics of your system
V = 10. # wind is 10 m/s
Fe = 500. # sampling frequency of the system is 500 Hz
tret = 0.004 # delay in the loop is 4 ms
gain = 0.3 # closed-loop gain is 0.3
# Here is the position of the object in the field
offx, offy = (10., 0.)
#################
# PSF generation
# Let's go. Let's define some basic parameters (arrays of spatial
# frequencies)
kx, ky, uk = computeSpatialFreqArrays(N, pixelSize, wavelengthIR)
M4 = defineDmFrequencyArea(kx, ky, rotdegree)
# This is the turbulent spectrum ....
W = computeWiener(kx, ky, L0, r0IR)
    # And here are some of the PSF-destroyers (in plain English: wavefront errors)
Waniso = anisoplanaticSpectrum(Cn2h, layerAltitude, L0, offx, offy,
wavelengthIR, kx, ky, W, M4)
Wfit = fittingSpectrum(W, M4)
Walias = aliasingSpectrum(kx, ky, r0IR, L0, M4)
Wbp = computeBpSpectrum(kx, ky, V, Fe, tret, gain, W, M4)
nmRms = 100.
Wother = otherSpectrum(nmRms, M4, uk, wavelengthIR)
# THE missing term = noise
# Wnoise = noiseSpectrum(Rmag, .. kx, ky) available one day ...
# Now, you sum up every contributor, and produce a phase structure function
Dphi = convertSpectrum2Dphi(Waniso + Wfit + Wother + Walias + Wbp, uk)
# And you "blur" the nice Airy pattern using that phase structure function
FTOtel = computeEeltOTF(pup)
psf = core_generatePsf(Dphi, FTOtel)
print('Strehl is ', psf.max())
plt.imshow( np.log(psf) )
return psf
# _ _ ____ _____
# | | | |___ ___ / ___|__ _ ___ ___ |___ /
# | | | / __|/ _ \ | | / _` / __|/ _ \ |_ \
# | |_| \__ \ __/ | |__| (_| \__ \ __/ ___) |
# \___/|___/\___| \____\__,_|___/\___| |____/
def instantaneous_scao_psf():
"""
The dirty one.
Let's try to simulate the fluctuations due to short exposures.
"""
    # I start from "use case 2" (exnihilo_scao_psf) and I sum the contributors
    # to the phase error. The variables used below (Waniso, Wfit, Wother,
    # Walias, Wbp, uk, N, V, pup) are assumed to still be in scope from that
    # use case.
    # What I get is the total power spectrum of the perturbed phase.
    WW = Waniso + Wfit + Wother + Walias + Wbp
    # So, I'm gonna do some random draw of a phase that follows the statistics
    # of the spectrum WW. For that, I'm gonna use sqrt(WW) as the modulus of the
    # FFT of the phase, and generate a random phase chosen uniformly between 0
    # and 2.pi. I will then do an FFT of that, in order to get the phase.
    WW[0, 0] = 0  # because I don't care about the piston mode
WW = np.sqrt(WW)
tmp = np.fft.fft2(WW * np.exp(2j * np.pi*np.random.rand(N, N))) * (uk)
    ph1 = tmp.real * np.sqrt(2)
    ph2 = tmp.imag * np.sqrt(2)  # a second, independent screen for free (unused below)
    # Now I compute widthScreen, the total width of the phase screens I have
    # generated, and ud, the size of their pixels.
widthScreen = 1. / uk # in metres
ud = widthScreen / N # size of the pixels of the phase screen
# With such a wide screen, and using a wind speed of V m/s, then I can
# simulate an exposure time of (widthScreen/V) seconds.
    # I recommend summing PSF snapshots every 50 cm (the actuator pitch of M4).
step = 0.50
stepPix = int(np.round(step / ud))
stepTime = (stepPix * ud) / V
DIT = 1.0 # 1 second integration time
niter = int(np.round(DIT / stepTime))
psfLE = 0 # psf Long Exposure
normFactor = np.sum(pup)**2
for i in range(niter):
psfSE = np.fft.fftshift(np.abs(np.fft.fft2(pup * np.exp(1j*ph1)))**2)
psfSE /= normFactor
print(psfSE.max())
psfLE += psfSE
ph1 = np.roll(ph1, stepPix, axis=0)
psfLE /= niter
    # Here, possibilities are infinite...
    # You can add some static aberrations, etc.,
    # and generate all the PSFs you want.
    return psfLE
|
AnisoCADO
|
/AnisoCADO-0.3.0.tar.gz/AnisoCADO-0.3.0/anisocado/_anisocado.py
|
_anisocado.py
|
import numpy as np
# import matplotlib.pyplot as plt
# plt.ion()
"""
CONVENTIONS : Ce fichier est ecrit en convention X,Y
...................................................... 1.0 Convention X,Y
Dans la convention d'axes (x,y), tous les tableaux representant des images
s'adressent par
tab[ix, iy]
Les indices s'utilisent alors classiquement
ind = np.where(tab2D > 0)
(ix, iy) = np.where(tab2D > 0)
où ind[0] porte X. On fera donc appel a
tab[ind]
tab[ind[0], tab[1]]
tab[ix, iy]
La fonction meshgrid fait chier, et doit s'appeler par
X,Y = meshgrid(x,y, indexing='ij')
pour creer des tableaux conformes a un appel en X[ix, iy].
Dans les tableaux qui stockent des coord en x,y on utilise
x = tab[:,0]
y = tab[:,1]
pour que l'utilisation de flatten() mette X en premier. Cette facon de proceder
est logique/compatible avec les indices.
Dans les fonctions on place x d'abord, y ensuite, dans les arguments comme
dans le retour de fonction
def toto(...,x,y,...)
return x, y
et
x, y = toto(...)
Graphisme: Pour afficher un tel tableau et avoir une representation
'naturelle' avec x "a droite" et y "en haut" il faut par contre
utiliser une transposition et retournement d'axe
plt.imshow(tab.T, origin='lower')
et qui offre une utilisation classique de
plt.plot(x, y, ...)
qui placera un overlay d'un plot de façon coherente sur l'image affichee.
Une sortie FITS d'un tableau tab[x,y] va generer un fichier qui contient des
data avec un axe rapide NAXIS1 dirige selon Y. Attention car de nombreux
logiciels considerent axe rapide = axe x (par exemple ds9), ou representent
l'axe rapide souvent horizontal en natif (ds9, python, yorick, idl, ...)
Donc, pour les entrees/sorties on transposera les data
pf.writeto('monfichier.fits', tab.T)
et
tab = pf.getdata('monfichier.fits').T
...................................................... 2.0 Convention Y,X
Dans la convention d'axe (y,x), tous les tableaux d'images sont
adresses par
tab[iy, ix]
Pour une manipulation d'indices du genre
ind = np.where(tab2D > 0)
on utilise l'une des 3 possibilites suivantes
ix = ind[1]; iy = ind[0] # attention a bien swapper y=0/x=1
(iy, ix) = ind # y vient d'abord, coherent avec les tableaux
(iy, ix) = where(tab2D > 0) # y vient d'abord
et appeler les elements des tableaux par
tab[iy, ix]
tab[ind]
Toutes les autres notations restent en "x d'abord, y ensuite".
Dans une procedure de calcul de coordonnees, on gardera la convention
def toto(args, x, y, ):
...
return x, y
et a l'appel de la fonction et recup des coordonnees on garde
x, y = toto(args, ax, ay, ..)
La fonction meshgrid doit s'appeler par
X,Y = meshgrid(x,y)
pour creer des tableaux conformes a un appel en X[iy, ix].
Dans les tableaux qui stockent des coord en x,y on utilise
x = tab[:,0]
y = tab[:,1]
pour que l'utilisation de flatten() mette X en premier. Cependant cette
notation est en conflit avec le traitement des indices (fonction where()) qui
placent Y d'abord.
Graphisme: Pour afficher un tel tableau et avoir une representation
'naturelle' avec x "a droite" et y "en haut" il faut juste utiliser le
retournement d'axe
plt.imshow(tab, origin='lower')
suivi d'une utilisation classique de
plt.plot(x, y, ...)
qui placera un overlay d'un plot de façon coherente sur l'image affichee.
Une sortie FITS d'un tableau tab[x,y] cree un fichier qui aura un axe
rapide NAXIS1 selon X, coherent avec la plupart des logiciels (ds9, python,
yorick, idl, ...)
............................................................ 3.0 Meshgrid
Quand on definit
x = ...
y = ...
et qu'on utilise np.meshgrid(), on a
code convention d'appel mat/imshow
X,Y = meshgrid(x,y) [y,x] X horizontal
X,Y = meshgrid(x,y,indexing='ij') [x,y] X vertical :-(
Y,X = meshgrid(y,x) [x,y] X vertical :-(
Y,X = meshgrid(y,x,indexing='ij') [y,x] X horizontal
X,Y = meshgrid(y,x) incoherent
meshgrid(x,y) equivaut a indexing='xy' (default).
Indexing 'ij' ou 'xy' affecte la transposition des X,Y de sortie.
L'interversion x/y dans les parametres d'appel et de sortie affecte la
transposition des X/Y.
Donc
- soit on utilise meshgrid(x,y) sans option d'indexing, ce qui donne des
plot 2D plutot user-friendly sans pencher la tete et sans transposee, mais
la notation d'appel dans les tableaux doit etre [iy,ix]
- soit on utilise meshgrid(..,indexing='ij'), on aura avec imshow() sans
option des plots a regarder avec la tete 90° a droite, et une notation [x,y]
partout dans le code.
Pour avoir les plots python "dans le bon sens" avec une notation [x,y] il faut
plt.imshow(p.T,origin='lower').
L'utilisation
X,Y = meshgrid(y,x)
ou
Y,X = meshgrid(x,y)
n'a de sens que si x==y (par exemple x=y=np.linspace(-1,1,n) ...) et permet
le meme rendu que indexing='ij'. Mais c'est un joli hasard. Des que les
axes x et y se distinguent, soit par le nbre de points, soit par les
valeurs (soit le range, le step, l'offset, etc.) alors inverser x et y est
juste purement incoherent.
"""
def fillPolygon(x, y, i0, j0, scale, gap, N, index=0):
"""
From a list of points defined by their 2 coordinates list
x and y, creates a filled polygon with sides joining the points.
The polygon is created in an image of size (N, N).
The origin (x,y)=(0,0) is mapped at pixel i0, j0 (both can be
floating-point values).
Arrays x and y are supposed to be in unit U, and scale is the
pixel size in U units.
:returns: filled polygon (N, N), boolean
:param float x, y: list of points defining the polygon
:param float i0, j0: index of pixels where the pupil should be centred.
Can be floating-point indexes.
:param float scale: size of a pixel of the image, in same unit as x and y.
    :param int N: size of output image.
:Example:
x = np.array([1,-1,-1.5,0,1.1])
y = np.array([1,1.5,-0.2,-2,0])
N = 200
i0 = N/2
j0 = N/2
gap = 0.
scale = 0.03
pol = fillPolygon(x, y, i0, j0, scale, gap, N, index=2)
"""
# define coordinates map centred on (i0,j0) with same units as x,y.
X = (np.arange(N) - i0) * scale
Y = (np.arange(N) - j0) * scale
    X, Y = np.meshgrid(X, Y, indexing='ij')  # [x,y] indexing
# define centre of polygon x0, y0
x0 = np.mean(x)
y0 = np.mean(y)
# compute angles of all pixels coordinates of the map, and all
# corners of the polygon
T = (np.arctan2(Y - y0, X - x0) + 2 * np.pi) % (2 * np.pi)
t = (np.arctan2(y - y0, x - x0) + 2 * np.pi) % (2 * np.pi)
    # Determine the winding direction. This is added so that it works
    # whatever the direction of rotation of the polygon points.
    # Actually, I could perhaps have sorted the points by their angle, to
    # make sure it works whatever order the points are given in
sens = np.median(np.diff(t))
if sens < 0:
x = x[::-1]
y = y[::-1]
t = t[::-1]
# re-organise order of polygon points so that it starts from
# angle = 0, or at least closest to 0.
imin = t.argmin() # position of the minimum
if imin != 0:
x = np.roll(x, -imin)
y = np.roll(y, -imin)
t = np.roll(t, -imin)
# For each couple of consecutive corners A, B, of the polygon, one fills
# the triangle AOB with True.
# Last triangle has a special treatment because it crosses the axis
# with theta=0=2pi
n = x.shape[0] # number of corners of polygon
indx, indy = (np.array([], dtype=np.int64), np.array([], dtype=np.int64))
distedge = np.array([], dtype=np.float64)
for i in range(n):
j = i + 1 # j=element next i except when i==n : then j=0 (cycling)
if j == n:
j = 0
sub = np.where((T >= t[-1]) | (T <= (t[0])))
else:
sub = np.where((T >= t[i]) & (T <= t[j]))
        # compute the unit vector between the 2 vertices
dy = y[j] - y[i]
dx = x[j] - x[i]
vnorm = np.sqrt(dx ** 2 + dy ** 2)
dx /= vnorm
dy /= vnorm
        # compute the cross product
crossprod = dx * (Y[sub] - y[i]) - dy * (X[sub] - x[i])
tmp = crossprod > gap
indx = np.append(indx, sub[0][tmp])
indy = np.append(indy, sub[1][tmp])
distedge = np.append(distedge, crossprod[tmp])
# choice of what is returned : either only the indexes, or the
# boolean map
if index == 1:
return (indx, indy, distedge)
elif index == 2:
a = np.zeros((N, N))
a[indx, indy] = distedge
return a
else:
a = np.zeros((N, N), dtype=np.bool_)
a[indx, indy] = True # convention [x,y]
return a
def centrePourVidal(N, i0, j0, centerMark):
"""
Renvoie une image de boolens (False) de taille (N,N) avec un point
ou une croix (True) centree sur (i0, j0).
:param int N: taille de l'image de sortie
:param float i0, j0: position du marqueur de sortie
:param int centerMark: 0 (pour rien), 1 (option point) ou 2 (option croix)
"""
scale = 1.0
res = 0
X = (np.arange(N) - i0) * scale
Y = (np.arange(N) - j0) * scale
X, Y = np.meshgrid(X, Y, indexing='ij') # convention d'appel [x,y]
if centerMark == 1:
res = (X ** 2 + Y ** 2) < 1
if centerMark == 2:
res = (np.abs(X) < 0.9) | (np.abs(Y) < 0.9)
return res
def fillSpider(N, nspider, dspider, i0, j0, scale, rot):
"""
Creates a boolean spider mask on a map of dimensions (N,N)
The spider is centred at floating-point coords (i0,j0).
:returns: spider image (boolean)
:param int N: size of output image
:param int nspider: number of spiders
:param float dspider: width of spiders
:param float i0: coord of spiders symmetry centre
:param float j0: coord of spiders symmetry centre
:param float scale: size of a pixel in same unit as dspider
:param float rot: rotation angle in radians
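    :Example: (an illustrative sketch; the values are arbitrary)
        spider = fillSpider(512, 3, 0.53, 256., 256., 0.1, 0.)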
"""
a = np.ones((N, N), dtype=np.bool_)
X = (np.arange(N) - i0) * scale
Y = (np.arange(N) - j0) * scale
X, Y = np.meshgrid(X, Y, indexing='ij') # convention d'appel [x,y]
w = 2 * np.pi / nspider
    # rot += np.pi/2  # because that's just the way it is!!
for i in range(nspider):
nn = (abs(
X * np.cos(i * w - rot) + Y * np.sin(i * w - rot)) < dspider / 2.)
a[nn] = False
return a
def generateEeltPupil_slow(npt, dspider, i0, j0, pixscale, rotdegree):
"""
Computes the binary EELT pupil on a map of size (npt, npt).
This is the original function, that builds the pupil shape according to
hardcoded contours.
This function is now obsolete, because it's been replaced by the faster
one generateEeltPupilMask()
:returns: pupil image (npt, npt), boolean
:param float dspider: width of spiders in meters
:param float i0, j0: index of pixels where the pupil should be centred.
Can be floating-point indexes.
:param float pixscale: size of a pixel of the image, in meters.
:param float rotdegree: rotation angle of the pupil, in degrees.
:Example:
>>> pup = generateEeltPupil_slow(800, 0.6, 400, 400, 0.1, 3.0)
"""
x = np.array(
[18.4524, 18.798, 18.4514, 18.796, 18.4484, 18.7919, 18.4433, 18.7858,
18.4363, 18.7776,
18.4273, 17.7349, 17.3831, 17.7243, 17.3717, 16.6772, 16.3233, 16.6645,
16.3099,
15.6135, 15.2579, 15.5991, 15.2429, 14.545, 14.188, 14.5292, 14.1718,
13.4727, 13.1146,
12.4138, 12.0552, 11.3528, 10.9939, 10.2902, 9.93103, 9.22619, 8.86699,
8.16118, 7.8021,
7.09552, 6.73671, 6.02955, 5.67117, 4.96362, 4.60582, 3.89808, 3.54699,
2.83805,
2.48141, 1.77263, 1.41934, 0.709707, 0.354564, -0.354564, -0.709707,
-1.41934,
-1.77263, -2.48141, -2.83805, -3.54699, -3.89808, -4.60582, -4.96362,
-5.67117,
-6.02955, -6.73671, -7.09552, -7.8021, -8.16118, -8.86699, -9.22619,
-9.93103,
-10.2902, -10.9939, -11.3528, -12.0552, -12.4138, -13.1146, -13.4727,
-14.1718,
-14.5292, -14.188, -14.545, -15.2429, -15.5991, -15.2579, -15.6135,
-16.3099, -16.6645,
-16.3233, -16.6772, -17.3717, -17.7243, -17.3831, -17.7349, -18.4273,
-18.7776,
-18.4363, -18.7858, -18.4433, -18.7919, -18.4484, -18.796, -18.4514,
-18.798, -18.4524,
-18.798, -18.4514, -18.796, -18.4484, -18.7919, -18.4433, -18.7858,
-18.4363, -18.7776,
-18.4273, -17.7349, -17.3831, -17.7243, -17.3717, -16.6772, -16.3233,
-16.6645,
-16.3099, -15.6135, -15.2579, -15.5991, -15.2429, -14.545, -14.188,
-14.5292, -14.1718,
-13.4727, -13.1146, -12.4138, -12.0552, -11.3528, -10.9939, -10.2902,
-9.93103,
-9.22619, -8.86699, -8.16118, -7.8021, -7.09552, -6.73671, -6.02955,
-5.67117,
-4.96362, -4.60582, -3.89808, -3.54699, -2.83805, -2.48141, -1.77263,
-1.41934,
-0.709707, -0.354564, 0.354564, 0.709707, 1.41934, 1.77263, 2.48141,
2.83805, 3.54699,
3.89808, 4.60582, 4.96362, 5.67117, 6.02955, 6.73671, 7.09552, 7.8021,
8.16118, 8.86699,
9.22619, 9.93103, 10.2902, 10.9939, 11.3528, 12.0552, 12.4138, 13.1146,
13.4727,
14.1718, 14.5292, 14.188, 14.545, 15.2429, 15.5991, 15.2579, 15.6135,
16.3099, 16.6645,
16.3233, 16.6772, 17.3717, 17.7243, 17.3831, 17.7349, 18.4273, 18.7776,
18.4363,
18.7858, 18.4433, 18.7919, 18.4484, 18.796, 18.4514, 18.798])
y = np.array(
[0, 0.614323, 1.22918, 1.84277, 2.45796, 3.07061, 3.68594, 4.29746,
4.91271, 5.5229,
6.13789, 6.14356, 6.75902, 7.36787, 7.98272, 7.98968, 8.60474, 9.21184,
9.82596,
9.83398, 10.448, 11.053, 11.6658, 11.6747, 12.2871, 12.8896, 13.5004,
13.51, 14.1202,
14.1294, 14.7389, 14.7478, 15.3564, 15.3648, 15.9724, 15.9802, 16.5867,
16.5939,
17.1992, 17.2057, 17.8095, 17.8154, 18.4177, 18.4227, 19.0233, 19.0275,
18.4307,
18.4337, 19.0337, 19.0357, 18.4377, 18.4387, 19.0378, 19.0378, 18.4387,
18.4377,
19.0357, 19.0337, 18.4337, 18.4307, 19.0275, 19.0233, 18.4227, 18.4177,
17.8154,
17.8095, 17.2057, 17.1992, 16.5939, 16.5867, 15.9802, 15.9724, 15.3648,
15.3564,
14.7478, 14.7389, 14.1294, 14.1202, 13.51, 13.5004, 12.8896, 12.2871,
11.6747, 11.6658,
11.053, 10.448, 9.83398, 9.82596, 9.21184, 8.60474, 7.98968, 7.98272,
7.36787, 6.75902,
6.14356, 6.13789, 5.5229, 4.91271, 4.29746, 3.68594, 3.07061, 2.45796,
1.84277, 1.22918,
0.614323, 0, -0.614323, -1.22918, -1.84277, -2.45796, -3.07061,
-3.68594,
-4.29746, -4.91271, -5.5229, -6.13789, -6.14356, -6.75902, -7.36787,
-7.98272,
-7.98968, -8.60474, -9.21184, -9.82596, -9.83398, -10.448, -11.053,
-11.6658, -11.6747,
-12.2871, -12.8896, -13.5004, -13.51, -14.1202, -14.1294, -14.7389,
-14.7478, -15.3564,
-15.3648, -15.9724, -15.9802, -16.5867, -16.5939, -17.1992, -17.2057,
-17.8095,
-17.8154, -18.4177, -18.4227, -19.0233, -19.0275, -18.4307, -18.4337,
-19.0337,
-19.0357, -18.4377, -18.4387, -19.0378, -19.0378, -18.4387, -18.4377,
-19.0357,
-19.0337, -18.4337, -18.4307, -19.0275, -19.0233, -18.4227, -18.4177,
-17.8154,
-17.8095, -17.2057, -17.1992, -16.5939, -16.5867, -15.9802, -15.9724,
-15.3648,
-15.3564, -14.7478, -14.7389, -14.1294, -14.1202, -13.51, -13.5004,
-12.8896, -12.2871,
-11.6747, -11.6658, -11.053, -10.448, -9.83398, -9.82596, -9.21184,
-8.60474, -7.98968,
-7.98272, -7.36787, -6.75902, -6.14356, -6.13789, -5.5229, -4.91271,
-4.29746,
-3.68594, -3.07061, -2.45796, -1.84277, -1.22918, -0.614323])
# Rotation matrices
rot = rotdegree * np.pi / 180.00
mrot = np.array([[np.cos(rot), np.sin(rot)], [-np.sin(rot), np.cos(rot)]])
# rotation of coordinates of outer segments
u = mrot.dot([x, y])
pup = fillPolygon(u[0], u[1], i0, j0, pixscale, 0., npt)
# INTERNAL CONTOUR OF OBSCURATION
xx = np.array(
[5.02571, 4.66726, 5.02543, 4.66673, 5.02458, 4.66568, 3.94877, 3.58959,
2.87216,
2.51286, 1.7951, 1.43584, 0.717959, 0.358899, -0.358899, -0.717959,
-1.43584, -1.7951,
-2.51286, -2.87216, -3.58959, -3.94877, -4.66568, -5.02458, -4.66673,
-5.02543,
-4.66726, -5.02571, -4.66726, -5.02543, -4.66673, -5.02458, -4.66568,
-3.94877,
-3.58959, -2.87216, -2.51286, -1.7951, -1.43584, -0.717959, -0.358899,
0.358899,
0.717959, 1.43584, 1.7951, 2.51286, 2.87216, 3.58959, 3.94877, 4.66568,
5.02458,
4.66673, 5.02543, 4.66726])
yy = np.array(
[0., 0.62184, 1.24347, 1.86531, 2.48652, 3.10816, 3.10885, 3.73041,
3.73104,
4.35239, 4.35288, 4.97389, 4.97416, 5.59468, 5.59468, 4.97416, 4.97389,
4.35288,
4.35239, 3.73104, 3.73041, 3.10885, 3.10816, 2.48652, 1.86531, 1.24347,
0.62184,
0.0, -0.62184, -1.24347, -1.86531, -2.48652, -3.10816, -3.10885,
-3.73041,
-3.73104, -4.35239, -4.35288, -4.97389, -4.97416, -5.59468, -5.59468,
-4.97416,
-4.97389, -4.35288, -4.35239, -3.73104, -3.73041, -3.10885, -3.10816,
-2.48652,
-1.86531, -1.24347, -0.62184])
# rotation of coordinates of inner segments (central obs)
u = mrot.dot([xx, yy])
pup = pup & ~fillPolygon(u[0], u[1], i0, j0, pixscale, 0, npt)
# SPIDERS ............................................
    nspider = 3  # for the day we want more spiders...
if (dspider > 0 and nspider > 0):
pup = pup & fillSpider(npt, nspider, dspider, i0, j0, pixscale, rot)
return pup
def createHexaPattern(pitch, supportSize):
"""
Cree une liste de coordonnees qui decrit un maillage hexagonal.
Retourne un tuple (x,y).
Le maillage est centre sur 0, l'un des points est (0,0).
Une des pointes de l'hexagone est dirigee selon l'axe Y, au sens ou le
tuple de sortie est (x,y).
:param float pitch: distance between 2 neighbour points
:param int supportSize: size of the support that need to be populated
"""
V3 = np.sqrt(3)
nx = int(np.ceil((supportSize / 2.0) / pitch) + 1)
x = pitch * (np.arange(2 * nx + 1) - nx)
ny = int(np.ceil((supportSize / 2.0) / pitch / V3) + 1)
y = (V3 * pitch) * (np.arange(2 * ny + 1) - ny)
x, y = np.meshgrid(x, y, indexing='ij')
x = x.flatten()
y = y.flatten()
    peak_axis = np.append(x, x + pitch / 2.)       # axis pointing towards a vertex
    flat_axis = np.append(y, y + pitch * V3 / 2.)  # axis pointing towards a flat side
return flat_axis, peak_axis
def generateCoordSegments(D, rot):
"""
Computes the coordinates of the corners of all the hexagonal
segments of M1.
Result is a tuple of arrays(6, 798).
:param float D: D is the pupil diameter in meters, it must be set to 40.0 m
for the nominal EELT.
:param float rot: pupil rotation angle in radians
"""
V3 = np.sqrt(3)
    pitch = 1.227314  # without the bowl correction
    pitch = 1.244683637214  # diameter of the INSCRIBED circle
    # diamseg = pitch*2/V3  # diameter of the circle containing the WHOLE segment
    # print("segment diameter : %.6f\n" % diamseg)
    # Create a hexagonal pattern with vertices along the <ly> variable
    lx, ly = createHexaPattern(pitch, 35 * pitch)
    ll = np.sqrt(lx ** 2 + ly ** 2)
    # Remove invalid segments using 2 perfectly empirical numbers,
    # adjusted by hand.
inner_rad, outer_rad = 4.1, 15.4 # nominal, 798 segments
    nn = (ll > inner_rad * pitch) & (ll < outer_rad * pitch)
lx = lx[nn]
ly = ly[nn]
lx, ly = reorganizeSegmentsOrderESO(lx, ly)
ll = np.sqrt(lx ** 2 + ly ** 2)
# n = ll.shape[0]
# print("Nbre de segments : %d\n" % n)
    # Create a hexagonal segment with a vertex pointing towards the
    # <hx> variable (hence the cos() on hx)
th = np.linspace(0, 2 * np.pi, 7)[0:6]
hx = np.cos(th) * pitch / V3
hy = np.sin(th) * pitch / V3
    # The mesh that allows hexagons with vertices at 3 and 9 o'clock to be
    # stacked is a hexagonal mesh with vertices at 12 and 6 o'clock, i.e.
    # rotated by 90°. That is why things had to be swapped above.
x = (lx[None, :] + hx[:, None])
y = (ly[None, :] + hy[:, None])
r = np.sqrt(x ** 2 + y ** 2)
R = 95.7853
rrc = R / r * np.arctan(r / R) # correction factor
x *= rrc
y *= rrc
nominalD = 40.0 # size of the OFFICIAL E-ELT
if D != nominalD:
x *= D / nominalD
y *= D / nominalD
# Rotation matrices
mrot = np.array([[np.cos(rot), np.sin(rot)], [-np.sin(rot), np.cos(rot)]])
# rotation of coordinates
    # the [x,y] array has shape (2,6,798). A silly transpose is needed to
    # turn it into (6,2,798) so that np.dot works correctly. On output,
    # xyrot has shape (2,6,798).
xyrot = np.dot(mrot, np.transpose(np.array([x, y]), (1, 0, 2)))
return xyrot[0], xyrot[1]
def reorganizeSegmentsOrderESO(x, y):
"""
Reorganisation des segments facon ESO.
Voir
ESO-193058 Standard Coordinate System and Basic Conventions
:param float x: tableau des centres X des segments
:param float y: idem Y
:return tuple (x,y): meme tuple que les arguments d'entree, mais tries.
"""
    # pi/3, pi/6, 2*pi, ...
pi_3 = np.pi / 3
pi_6 = np.pi / 6
pix2 = 2 * np.pi
    # compute the angles
t = (np.arctan2(y, x) + pi_6 - 1e-3) % (pix2)
X = np.array([])
Y = np.array([])
A = 100.
for k in range(6):
sector = (t > k * pi_3) & (t < (k + 1) * pi_3)
u = k * pi_3
distance = (A * np.cos(u) - np.sin(u)) * x[sector] + (
np.cos(u) + A * np.sin(u)) * y[sector]
indsort = np.argsort(distance)
X = np.append(X, x[sector][indsort])
Y = np.append(Y, y[sector][indsort])
return X, Y
def getdatatype(truc):
"""
Returns the data type of a numpy variable, either scalar value or array.
"""
if np.isscalar(truc):
return type(truc)
else:
return type(truc.flatten()[0])
def generateSegmentProperties(attribute, hx, hy, i0, j0, scale, gap, N, D,
softGap=0):
"""
Builds a 2D image of the pupil with some attributes for each of the
segments. Those segments are described from arguments hx and hy, that
are produced by the function generateCoordSegments(D, rot).
When attribute is a phase, then it must be a float array of dimension
[3, 798] with the dimension 3 being piston, tip, and tilt.
    The unit of the phase is xxx rms, and the output of the procedure
    will be in units of xxx.
:returns: pupil image (N, N), with the same type of input argument attribute
:param float/int/bool attribute: scalar value or 1D-array of the
reflectivity of the segments or 2D array of phase
If attribute is scalar, the value will be replicated for all segments
If attribute is a 1D array, then it shall contain the reflectivities
of all segments.
If attribute is a 2D array then it shall contain the piston, tip
and tilt of the segments. The array shall be of dimension
[3, 798] that contains [piston, tip, tilt]
On output, the data type of the pupil map will be the same as input
:param float hx, hy: arrays [6,:] describing the segment shapes. They are
generated using generateCoordSegments()
:param float i0, j0: index of pixels where the pupil should be centred.
Can be floating-point indexes.
:param float scale: size of a pixel of the image, in meters.
:param float gap: half-space between segments in meters
:param int N: size of the output array (N,N)
:param float D: diameter of the pupil. For the nominal EELT, D shall
be set to 40.0
:param bool softGap: if False, the gap between segments is binary 0/1
depending if the pixel is within the gap or not. If True, the gap
is a smooth region of a fwhm of 2 pixels with a depth related to the
gap width.
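    :Example: (an illustrative sketch; the values are arbitrary)
        hx, hy = generateCoordSegments(40., 0.)
        refl = np.ones(hx.shape[-1])
        pup = generateSegmentProperties(refl, hx, hy, 400., 400., 0.06,
                                        0.02, 800, 40.)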
"""
# number of segments
nseg = hx.shape[-1]
# If <attribute> is a scalar, then we make a list. It will be required
# later on to set the attribute to each segment.
if np.isscalar(attribute):
attribute = np.array([attribute] * nseg)
# the pupil map is created with the same data type as <attribute>
pupil = np.zeros((N, N), dtype=getdatatype(attribute))
# average coord of segments
x0 = np.mean(hx, axis=0)
y0 = np.mean(hy, axis=0)
# avg coord of segments in pixel indexes
x0 = x0 / scale + i0
y0 = y0 / scale + j0
# size of mini-support
hexrad = 0.75 * D / 40. / scale
ix0 = np.floor(x0 - hexrad).astype(int) - 1
iy0 = np.floor(y0 - hexrad).astype(int) - 1
segdiam = np.ceil(hexrad * 2 + 1).astype(int) + 1
n = attribute.shape[0]
if n != 3:
        # attribute is a single value: either reflectivity, or boolean,
        # or just piston.
if softGap != 0:
# Soft gaps
# The impact of gaps are modelled using a simple function:
# Lorentz, 1/(1+x**2)
# The fwhm is always equal to 2 pixels because the gap is supposed
# to be "small/invisible/undersampled". The only visible thing is
# the width of the impulse response, chosen 2-pixel wide to be
# well sampled.
# The "depth" is related to the gap width. The integral of a
# Lorentzian of 2 pix wide is PI. Integral of a gap of width 'gap'
# in pixels is 'gap'.
# So the depth equals to gap/scale/np.pi.
for i in range(nseg):
indx, indy, distedge = fillPolygon(hx[:, i], hy[:, i],
i0 - ix0[i], j0 - iy0[i],
scale, gap * 0., segdiam,
index=1)
pupil[indx + ix0[i], indy + iy0[i]] = attribute[i] * (
1. - (gap / scale / np.pi) / (
1 + (distedge / scale) ** 2))
else:
# Hard gaps
for i in range(nseg):
indx, indy, distedge = fillPolygon(hx[:, i], hy[:, i],
i0 - ix0[i], j0 - iy0[i],
scale, gap, segdiam, index=1)
pupil[indx + ix0[i], indy + iy0[i]] = attribute[i]
else:
# attribute is [piston, tip, tilt]
minimap = np.zeros((segdiam, segdiam))
xmap = np.arange(segdiam) - segdiam / 2
xmap, ymap = np.meshgrid(xmap, xmap, indexing='ij') # [x,y] convention
pitch = 1.244683637214 # diameter of inscribed circle
diamseg = pitch * 2 / np.sqrt(3) # diameter of circumscribed circle
diamfrizou = (pitch + diamseg) / 2 * D / 40. # average diameter
        # Compute the scaling factor for the unit of the tilts.
        # xmap and ymap are computed with an increment of +1 between two
        # neighbouring pixels, so the factor to apply is chosen such that
        # the angle is preserved.
factunit = 4 * scale / diamfrizou
for i in range(nseg):
indx, indy, _ = fillPolygon(hx[:, i], hy[:, i], i0 - ix0[i],
j0 - iy0[i], scale, 0., segdiam,
index=1)
minimap = attribute[0, i] + (factunit * attribute[1, i]) * xmap + (
factunit * attribute[2, i]) * ymap
pupil[indx + ix0[i], indy + iy0[i]] = minimap[indx, indy]
return pupil
# _ _ ___ ____ _ _ _ _______ _______ _
# | | | |_ _/ ___| | | | | | | ____\ \ / / ____| |
# | |_| || | | _| |_| |_____| | | _| \ \ / /| _| | |
# | _ || | |_| | _ |_____| |___| |___ \ V / | |___| |___
# |_| |_|___\____|_| |_| |_____|_____| \_/ |_____|_____|
def getEeltSegmentNumber():
"""
Just returns the number of segments of the EELT nominal pupil, in order
to be able to generate either reflectivities, or phase errors, or else.
"""
hx, hy = generateCoordSegments(40., 0.)
n = hx.shape[-1]
return n
def generateEeltPupilMask(npt, dspider, i0, j0, pixscale, gap, rotdegree,
D=40.0, centerMark=0):
"""
Generates a boolean pupil mask of the binary EELT pupil
on a map of size (npt, npt).
:returns: pupil image (npt, npt), boolean
:param int npt: size of the output array
:param float dspider: width of spiders in meters
:param float i0, j0: index of pixels where the pupil should be centred.
Can be floating-point indexes.
:param float pixscale: size of a pixel of the image, in meters.
:param float gap: half-space between segments in meters
:param float rotdegree: rotation angle of the pupil, in degrees.
:param float D: diameter of the pupil. For the nominal EELT, D shall
be set to 40.0
:param int centerMark: when centerMark!=0, a pixel is added at the centre of
symmetry of the pupil in order to debug things using compass.
centerMark==1 draws a point
centerMark==2 draws 2 lines
:Example:
npt = 752
i0 = npt/2+0.5
j0 = npt/2+0.5
rotdegree = 90.0
pixscale = 40./npt
dspider = 0.53
gap = 0.02
pup = generateEeltPupilMask(npt, dspider, i0, j0, pixscale, gap, rotdegree)
"""
rot = rotdegree * np.pi / 180
# Generation of segments coordinates.
    # hx and hy have a shape [6,798] describing the 6 vertices of the 798
# hexagonal mirrors
hx, hy = generateCoordSegments(D, rot)
# From the data of hex mirrors, we build the pupil image using
# boolean
pup = generateSegmentProperties(True, hx, hy, i0, j0, pixscale, gap, npt, D)
# SPIDERS ............................................
nspider = 3 # for the day where we have more/less spiders ;-)
if (dspider > 0 and nspider > 0):
pup = pup & fillSpider(npt, nspider, dspider, i0, j0, pixscale, rot)
    # Add a pixel (to mark the centre) or a cross at the centre of the
    # pupil, depending on the value of centerMark
if centerMark:
pup = np.logical_xor(pup, centrePourVidal(npt, i0, j0, centerMark))
return pup
def generateEeltPupilReflectivity(refl, npt, dspider, i0, j0, pixscale, gap,
rotdegree, D=40.0, softGap=False):
"""
Generates a map of the reflectivity of the EELT pupil, on an array
of size (npt, npt).
:returns: pupil image (npt, npt), with the same type of input argument refl
:param float/int/bool refl: scalar value or 1D-array of the reflectivity of
the segments.
If refl is scalar, the value will be replicated for all segments.
If refl is a 1D array, then it shall contain the reflectivities
of all segments.
On output, the data type of the pupil map will be the same as refl.
:param int npt: size of the output array
:param float dspider: width of spiders in meters
:param float i0, j0: index of pixels where the pupil should be centred.
Can be floating-point indexes.
:param float pixscale: size of a pixel of the image, in meters.
:param float gap: half-space between segments in meters
:param float rotdegree: rotation angle of the pupil, in degrees.
:param float D: diameter of the pupil. For the nominal EELT, D shall
be set to 40.0
:param bool softGap: if False, the gap between segments is binary 0/1
depending if the pixel is within the gap or not. If True, the gap
is a smooth region of a fwhm of 2 pixels with a depth related to the
gap width.
:Example:
refl = np.ones(798)+np.random.randn(798)/20.
dead = 3
refl[(np.random.rand(dead)*797).astype(int)] = 0.
npt = 1200
i0 = npt/2+0.5
j0 = npt/2+0.5
rotdegree = 14.0
pixscale = 44./npt
dspider = 0.53
gap = 0.02
pup = generateEeltPupilReflectivity(refl, npt, dspider, i0, j0, pixscale,
gap, rotdegree, softGap=True)
"""
rot = rotdegree * np.pi / 180
# Generation of segments coordinates.
    # hx and hy have a shape [6,798] describing the 6 vertices of the 798
# hexagonal mirrors
hx, hy = generateCoordSegments(D, rot)
# From the data of hex mirrors, we build the pupil image according
# to the properties defined by input argument <refl>
pup = generateSegmentProperties(refl, hx, hy, i0, j0, pixscale, gap, npt, D,
softGap=softGap)
# SPIDERS ............................................
nspider = 3 # for the day where we have more/less spiders ;-)
if (dspider > 0 and nspider > 0):
pup = pup * fillSpider(npt, nspider, dspider, i0, j0, pixscale, rot)
return pup
def generateEeltPupilPhase(phase, npt, dspider, i0, j0, pixscale, rotdegree,
D=40.0):
"""
Generates a map of the segments phase errors of the EELT pupil, on an array
of size (npt, npt).
:returns: phase image (npt, npt), with the same type of input argument phase
:param float phase: scalar value or 2D-array of the piston, tip
and tilt of the segments. The array shall be of dimension
[3, 798] that contains [piston, tip, tilt]
:param int npt: size of the output array
:param float dspider: width of spiders in meters
:param float i0, j0: index of pixels where the pupil should be centred.
Can be floating-point indexes.
:param float pixscale: size of a pixel of the image, in meters.
:param float rotdegree: rotation angle of the pupil, in degrees.
:param float D: diameter of the pupil. For the nominal EELT, D shall
be set to 40.0
:Example:
phase = np.random.randn(3,798)
phase = np.zeros((3,798)); phase[1,:]=1.
npt = 752
i0 = npt/2+0.5
j0 = npt/2+0.5
rotdegree = 90.0
pixscale = 41./npt
dspider = 0.51
pup = generateEeltPupilPhase(phase, npt, dspider, i0, j0, pixscale,
rotdegree)
"""
rot = rotdegree * np.pi / 180
# Generation of segments coordinates.
    # hx and hy have a shape [6,798] describing the 6 vertices of the 798
# hexagonal mirrors
hx, hy = generateCoordSegments(D, rot)
# From the data of hex mirrors, we build the pupil phase image according
# to the properties defined by input argument <phase>
pup = generateSegmentProperties(phase, hx, hy, i0, j0, pixscale, 0.0, npt,
D)
return pup
"""
refl = np.ones(798)+np.random.randn(798)/10.
N = npt = 800
i0 = N/2+0.5
j0 = N/2+0.5
rotdegree = 10.0
scale = pixscale = 41./N
dspider = 0.51
#smap = generateEeltPupil_slow(N, dspider, i0, j0, scale, rotdegree)
gap = 0.02
p = generateEeltPupilMask(N, dspider, i0, j0+10, scale, gap, rotdegree)
plt.clf()
plt.matshow(p, fignum=1)
#p = generateEeltPupilReflectivity(refl, N, dspider, i0, j0, pixscale,
rotdegree, D=40.0)
phase = np.zeros((3,798)); phase[1,:]=1.
phase = np.random.randn(3,798)
p = generateEeltPupilPhase(phase, N, dspider, i0, j0, pixscale, rotdegree,
D=40.0)
plt.matshow(p, fignum=1)
N = 2048
i0 = N/2
j0 = N/2
rotdegree = 10.0
scale = pixscale = 40./800
dspider = 0.53
pup = generateEeltPupilMask(N, dspider, i0, j0, scale, 0.02, rotdegree)
phase = np.random.randn(3,798) * 1. # en microns rms
phase = np.zeros((3,798)); phase[1,:]=0.3
delta = generateEeltPupilPhase(phase, N, dspider, i0, j0, pixscale, rotdegree)
lam = 1.65 # microns
F = np.exp((1j*2*np.pi/lam)*delta) * pup
psf = np.fft.fftshift(np.abs(np.fft.fft2(F))) / np.sum(pup)
"""
|
AnisoCADO
|
/AnisoCADO-0.3.0.tar.gz/AnisoCADO-0.3.0/anisocado/pupil_utils.py
|
pupil_utils.py
|
import struct
class controllerClass():
def __init__(self, carClass):
self.carClass = carClass
async def _setSpeed(self, speed:int, accel:int):
# Parameters:
# speed -- Desired speed. (from 0 - 1000)
# accel -- Desired acceleration. (from 0 - 1000)
self.speed = speed
self.accel = accel
if speed > 1000 or accel > 1000:
print("Speed/Accellimit exceeded!")
if speed < 0 or accel < 0:
print("Not valid input!")
else:
command = struct.pack("<BHHB", 0x24, speed, accel, 0x01)
await self.carClass._sendCommand(command)
async def _setLane(self, offset:float):
command = struct.pack("<Bf", 0x2c, offset)
await self.carClass._sendCommand(command)
async def _changeLane(self, speed:int, accel:int, offset:float):
command = struct.pack("<BHHf", 0x25, speed, accel, offset)
await self.carClass._sendCommand(command)
async def _changeLaneLeft(self, speed:int, accel:int, lanes:int):
await self._changeLane(speed, accel, 44.5 * lanes)
    async def _changeLaneRight(self, speed:int, accel:int, lanes:int):
        await self._changeLane(speed, accel, -44.5 * lanes)
# -------------------------------------------------------------------------------------
    def changeLaneLeft(self, speed:int, accel:int, lanes:int):
        self.speed = speed
        self.accel = accel
        self.changeLane(44.5 * lanes)
    def changeLaneRight(self, speed:int, accel:int, lanes:int):
        self.speed = speed
        self.accel = accel
        self.changeLane(-44.5 * lanes)
def setLane(self, offset:float):
command = struct.pack("<Bf", 0x2c, offset)
self.carClass.sendCommand(command)
def changeLane(self, offset:float):
self.setLane(0.0)
command = struct.pack("<BHHf", 0x25, self.speed, self.accel, offset)
self.carClass.sendCommand(command)
def setSpeed(self, speed:int, accel:int):
self.speed = speed
self.accel = accel
if speed > 1000 or accel > 1000:
print("Speed/Accellimit exceeded!")
if speed < 0 or accel < 0:
print("Not valid input!")
else:
command = struct.pack("<BHHB", 0x24, speed, accel, 0x01)
print(command)
self.carClass.sendCommand(command)
|
Anki-Overdrive-Windows
|
/Anki-Overdrive-Windows-0.5.3.tar.gz/Anki-Overdrive-Windows-0.5.3/controller.py
|
controller.py
|
# Anki Overdrive Windows SDK
## Prologue
The following library is based on Python 3.11 and uses the [bleak](https://github.com/hbldh/bleak) bluetooth library and [asyncio](https://github.com/python/asyncio/tree/master).
Julius and yours truly decided to write this library because there was no Anki Overdrive SDK for Windows.
## Installation
1. Install Python (3.9 or newer; the library targets 3.11) if you don't already have Python installed
2. Run the following commands:
```
pip install bleak
pip install asyncio
```
3. Have fun, and remember to `await` the async `_functions()`
## Example
```
from anki_sdk.cars import *
from anki_sdk.controller import *
import anki_sdk.utils as utils
import time
import asyncio
async def testDef():
test_car = carClass("XX:XX:XX:XX:XX:XX")
test_car_controller = controllerClass(test_car)
await test_car._connect()
await test_car_controller._setSpeed(1000, 1000)
time.sleep(3)
await test_car._disconnect()
asyncio.run(testDef())
```
|
Anki-Overdrive-Windows
|
/Anki-Overdrive-Windows-0.5.3.tar.gz/Anki-Overdrive-Windows-0.5.3/README.md
|
README.md
|
# AnkiChinese
Asynchronously scrape the ArchChinese dictionary to generate Anki flashcards with:
- Pinyin & Audio
- Definitions
- Example words
- Formation/Origin
- Stroke order diagrams
- HSK level
# Installation
pip install ankichinese
# Usage
ankichinese
-h, --help Show help message and exit
--export, -x {anki, csv, update} Export mode (default: anki)
anki: Generate new AnkiChinese deck
csv: Generate CSV file
update: Update existing deck
--input, -i INPUT Input file with characters to scrape (default: input.txt)
--output, -o OUTPUT Name of output file (do not include extension)
(default: ankichinese_output)
--definitions, -def NUM Number of definitions to scrape per character (default: 5)
--examples, -ex NUM Number of example words to scrape per character (default: 3)
--requests-at-once, -r NUM Maximum number of requests at once (default: 10)
--requests-per-second, -rs NUM Maximum number of requests per second (default: 5)
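For example, to generate a CSV file with 5 example words per character (the file names here are only illustrative):

    ankichinese -x csv -i hsk4.txt -o hsk4_cards -ex 5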
## Generate New AnkiChinese Deck
Create an entirely new Anki deck named `ankichinese_output.apkg` in the current directory using custom AnkiChinese styling.
1. Create `input.txt` with the characters you want to scrape.
2. Run `ankichinese -x anki`.
3. Open Anki and import `ankichinese_output.apkg`.
**Updating is Easy!**
Just run `ankichinese` again with new characters in `input.txt` and import the new `ankichinese_output.apkg` file into Anki. Anki will automatically update the existing deck without losing progress.
## Update Existing (Non-AnkiChinese) Deck Without Losing Progress
1. Create `input.txt` with the characters you want to scrape (can be the same as the existing deck).
2. Run `ankichinese -x update`.
3. Choose deck and model of cards to update. The model must include a field named `Hanzi`.
4. Import the new `ankichinese_audio.apkg` file into Anki. This will import the audio files (and create an empty deck that can be deleted).
# Tools Used
- Asynchronous I/O: [Asyncio](https://docs.python.org/3/library/asyncio.html)
- Limit concurrency: [Aiometer](https://github.com/florimondmanca/aiometer)
- Web Automation and HTML Interaction: [Playwright](https://playwright.dev/python/)
- Anki deck generation: [Genanki](https://github.com/kerrickstaley/genanki)
- Progress bars: [tqdm](https://github.com/tqdm/tqdm)
- HTML parsing and scraping: [Beautiful Soup](https://www.crummy.com/software/BeautifulSoup/)
- Data manipulation: [Pandas](https://pandas.pydata.org/)
# Credits
Stroke order diagrams:
- Online stroke order diagrams: [Hanzi Writer](https://hanziwriter.org/)
- Offline stroke order font: [Reinaert Albrecht](https://rtega.be/chmn/index.php?subpage=68)
Chinese audio:
- [Yoyo Chinese](https://yoyochinese.com/chinese-learning-tools/Mandarin-Chinese-pronunciation-lesson/pinyin-chart-table)
- Neutral tones: [Purple Culture](https://www.purpleculture.net/chinese_pinyin_chart/)
|
AnkiChinese
|
/AnkiChinese-1.3.5.tar.gz/AnkiChinese-1.3.5/README.md
|
README.md
|
# AnkiIn
[![Release][release-shield]][release-url]
[![MIT License][license-shield]][license-url]
[![Issues][issues-shield]][issues-url]
[![Stargazers][stars-shield]][stars-url]
[![Forks][forks-shield]][forks-url]
[![Contributors][contributors-shield]][contributors-url]
[![CodeFactor][codefactor-shield]][codefactor-url]
See [AnkiLink Project](https://github.com/users/Clouder0/projects/1) for future plans.
## Introduction
AnkiIn is a Python Package that enables you to generate Anki cards from markdown text.
It is easy to use and powerful.
**ATTENTION! This repo is for developers, so if you just want to use an Anki Importer, please check [AnkiLink](https://github.com/Clouder0/AnkiLink)**
Features:
- Directly Import into Anki via anki-connect
- Directly Export to `apkg` file with Anki offline via genanki
- Inline Configuration
- Human-Friendly Syntax
- Markdown Rendering Support
- Html Support
- Cross-Platform
- Many Useful built-in Note Types:
- Q&A
- Cloze
- Choices
- ListCloze
- TableCloze
- Easy to Extend
---
To use this lib, you are not required to sacrifice your note readability for compatibility with Anki.
Here is a quick example:
```markdown
This is a question.
This is an answer.
Single line question.
Multiple line answer.
The first line of this block is recognized as the question.
Multiple line question is <br> possible somehow.
too hacky maybe.
markdown rendering is supported.
- use a list!
- or something like that.
Clozes are **easy** to **create** too.
```
## Installation
You can install by pip:
```bash
pip install AnkiIn
```
### install Anki Connect
To use the lib, you need to install [AnkiConnect](https://github.com/FooSoft/anki-connect) extension.
Code:`2055492159`
For detailed installation guide, please visit the [anki-connect repo](https://github.com/FooSoft/anki-connect).
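To check that AnkiConnect is reachable before using AnkiIn, you can query the standard AnkiConnect JSON API directly (a quick sanity check, not part of AnkiIn's own API):

```python
import json
import urllib.request

# AnkiConnect listens on http://127.0.0.1:8765 by default
request = urllib.request.Request(
    "http://127.0.0.1:8765",
    json.dumps({"action": "version", "version": 6}).encode("utf-8"),
)
print(json.load(urllib.request.urlopen(request)))
# expected: {'result': 6, 'error': None}
```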
## Usage
You can review [AnkiLink](https://github.com/Clouder0/AnkiLink) to understand how this works.
Also, [AnkiIn Wiki](https://github.com/Clouder0/AnkiIn/wiki) is under construction.
For more syntax examples, see [tests](https://github.com/Clouder0/AnkiIn/tree/main/tests).
## Applications
I'd like to list some applications using AnkiIn here.
If you want to add yours, please create an issue/pull request.
- [AnkiLink](https://github.com/Clouder0/AnkiLink)
## Credit
- [anki](https://github.com/ankitects/anki)
- [anki-connect](https://github.com/FooSoft/anki-connect)
- [genanki](https://github.com/kerrickstaley/genanki)
- [markdown2](https://github.com/trentm/python-markdown2)
## License
The source code is licensed under MIT.
License is available [here](https://github.com/Clouder0/AnkiIn/blob/main/LICENSE).
[contributors-shield]: https://img.shields.io/github/contributors/Clouder0/AnkiIn.svg
[contributors-url]: https://github.com/Clouder0/AnkiIn/graphs/contributors
[forks-shield]: https://img.shields.io/github/forks/Clouder0/AnkiIn.svg
[forks-url]: https://github.com/Clouder0/AnkiIn/network/members
[stars-shield]: https://img.shields.io/github/stars/Clouder0/AnkiIn.svg
[stars-url]: https://github.com/Clouder0/AnkiIn/stargazers
[issues-shield]: https://img.shields.io/github/issues/Clouder0/AnkiIn.svg
[issues-url]: https://github.com/Clouder0/AnkiIn/issues
[license-shield]: https://img.shields.io/github/license/Clouder0/AnkiIn.svg
[license-url]: https://github.com/Clouder0/AnkiIn/blob/main/LICENSE
[release-shield]: https://img.shields.io/github/release/Clouder0/AnkiIn.svg
[release-url]: https://github.com/Clouder0/AnkiIn/releases
[codefactor-shield]: https://www.codefactor.io/repository/github/clouder0/AnkiIn/badge/main
[codefactor-url]: https://www.codefactor.io/repository/github/clouder0/AnkiIn/overview/main
|
AnkiIn
|
/AnkiIn-0.1.7.tar.gz/AnkiIn-0.1.7/README.md
|
README.md
|
import os
from pathlib import Path
import asyncio
import re
import ssl
from random import randrange
from typing import NoReturn
import unicodedata
import sys
import aiohttp
import genanki
import csv
import json
class AnkiCardOTron(object):
"""
Class that represents an Anki Deck builder
    It is responsible for receiving inputs in Hebrew and
    creating an Anki Deck with their translations
The flow works as follows:
instantiate the class with a file_path or word_list
instance = AnkiCardOTron(word_list=my_word_list)
add extra words, or pop undesired words
instance.add_words(['example1', 'example2'])
instance.pop_word('example1')
translate the words
instance.translate()
generate the deck
instance.generate_deck(path)
"""
def __init__(self, **kwargs):
"""
Constructor for the class. It requires either file_path, word_list,
but no both, or, alternatively, you can set empty=true
Args:
deck_name (str):
The name that the output should have
file_path(a file path, str):
A file path for a CSV file containing one word per cell
in_memory(bool):
if set to true, file_path is read as a file in memory
word_list(list(str)):
A list of words to be translated
            empty(bool):
                lets you create an instance without word_list/file_path
            language(str):
                lets you define the input language (not implemented)
IMPLEMENT: model, template, from_kindle
"""
for key, value in kwargs.items():
setattr(self, key, value)
if not hasattr(self, "empty"):
if (not hasattr(self, "file_path")) and (not hasattr(self, "word_list")):
raise NameError("You must pass a word_list or file_path as an argument")
if hasattr(self, "file_path") and hasattr(self, "word_list"):
raise NameError("You must pass either word_list or file_path, not both")
# define the input type
self.csv = False if hasattr(self, "word_list") else True
if not hasattr(self, "language"):
self.language = "Hebrew"
if not hasattr(self, "deck_name"):
self.deck_name = "anki_deck" + str(randrange(1 << 30, 1 << 31))
# make word_list private.
if hasattr(self, "word_list"):
self.__word_list = self.word_list
delattr(self, "word_list")
        ## TODO: implement a way to modify the model
self.my_deck = genanki.Deck(randrange(1 << 30, 1 << 31), self.deck_name)
self.list_of_fields = {
self.language,
"Translation",
"Token",
"Classification",
"Multiple_Meaning",
}
self.df_main_table = {}
self.__create_model()
self.__errorHandler = self.AnkiTronError()
self.number_errors = self.__errorHandler.number_errors
self.input_errors = self.__errorHandler.input_errors
self.translate_errors = self.__errorHandler.translate_errors
self.errors = self.__errorHandler.errors
# if it's empty, do not process the file/list
if hasattr(self, "empty"):
return
self.__open_file()
def __open_file(self) -> NoReturn:
""" Takes the file handlers or the word_list and ingest """
## TODO: implement cleanup
        # for in_memory compatibility
if hasattr(self, "in_memory") and self.csv:
input_list = self.file_path.read().decode("utf-8-sig").splitlines()
else:
if self.csv:
try:
with open(self.file_path, newline="", encoding="utf-8-sig") as f:
input_list = [line.strip() for line in f]
# check for two words in each input
except FileNotFoundError as input_not_found:
raise FileNotFoundError(
"The CSV file doesn't exist"
) from input_not_found
else:
input_list = self.__word_list
self.__word_list = self.__format_input(input_list)
def serialize(self) -> str:
"""
Returns a json string representation of the data
"""
return json.dumps(self.df_main_table, ensure_ascii=False)
def deserialize(self, serialized: str) -> NoReturn:
"""
Load json data into unprocessed words
"""
new_data = json.loads(serialized)
self.df_main_table.update(new_data)
def __format_input(self, input_list: list) -> str:
"""
Receive a list of words from the user and format it
return a list containing only words in Hebrew
Does not separate multiple words as it may represent
an expression
"""
        # some punctuation marks are excluded because they are used in Hebrew
word_list_tmp = []
punctuation = r""" !"#$%&()*+,-./:;<=>?@[\]^_{|}~"""
for input in input_list:
tmp = input.split(",")
for word in tmp:
regex = re.compile("[%s]" % re.escape(punctuation))
word_list_tmp.append(regex.sub("", word))
word_list_tmp = [x for x in word_list_tmp if x]
word_list = word_list_tmp.copy()
for word in word_list:
if self.__are_words(word):
pass
else:
self.__errorHandler.create_error(
word, "The token was not identified as Hebrew", "Input"
)
word_list_tmp.remove(word)
return word_list_tmp
def __are_words(self, word):
""" check if the str are words in Hebrew"""
return any(
char in set("בגדהוזחטיכךלמנסעפצקרשתםןףץ")
for char in word.lower()
)
def get_unprocessed_words(self) -> list:
"""return all untranslated words """
return self.__word_list
def get_processed_words(self) -> list:
"""return all translated words"""
processed_words = []
for keys in self.df_main_table.keys():
processed_words.append(keys)
return processed_words
def add_words(self, input_words: list) -> NoReturn:
"""
        Use to add extra words to the deck; afterwards you should
        translate -> add notes as usual.
        It should not be called before create_note has been run on the
        initial words: it replaces all words currently in the "staging" area
"""
assert type(input_words) == list, "You must provide a list of words"
self.__word_list = self.__format_input(input_words)
def pop_word(self, word: str) -> NoReturn:
"""
Remove the word from the processed words
If not found,from the unprocessed words
if not found again, returns not found
"""
        assert type(word) == str, "You must provide a string"
if word in self.df_main_table.keys():
del self.df_main_table[word]
elif word in self.__word_list:
            self.__word_list.remove(word)
else:
return "not found"
def translate(self):
""" Async wrapper that calls the async api call"""
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(self.__IO_main(loop))
loop.close()
        self.__save_card()
        self.__errorHandler.translated = True
        # empty the word list after the process is done.
        self.__word_list = []
async def __IO_main(self, loop: object) -> NoReturn:
""" create the session using aiohttp"""
headers = self.__get_headers()
async with aiohttp.ClientSession(loop=loop, headers=headers) as session:
response = await asyncio.gather(
*[self.__API_call(session, word) for word in self.__word_list],
return_exceptions=False,
)
async def __API_call(self, session: object, word: str) -> NoReturn:
""" call the API """
body = {"Query": word, "ClientName": "Android_Hebrew"}
url = "http://services.morfix.com/translationhebrew/TranslationService/GetTranslation/"
async with session.post(url, json=body, ssl=ssl.SSLContext()) as response:
if response.reason == "OK":
await self.__extract_response(await response.json(), word)
else:
                self.__errorHandler.create_error(word, response.reason, "Translation")
async def __extract_response(self, html: str, word: str) -> str:
"""
        Extract the data from the API response.
        It receives the response from the API call if it is OK,
        extracts the relevant data, and creates the fields to be inserted in the notes.
        Observation: the fields in the table must be the same as in __create_model
"""
if html["ResultType"] == "Match":
meaning = html["Words"][0]
table = {
"Hebrew": word,
"Translation": meaning["OutputLanguageMeaningsString"],
"Token": meaning["InputLanguageMeanings"][0][0]["DisplayText"],
"Classification": meaning["PartOfSpeech"],
}
if len(html["Words"]) == 1:
table["Multiple_Meaning"] = True
else:
table["Multiple_Meaning"] = False
self.df_main_table[word] = table
else:
self.__errorHandler.create_error(word, html["ResultType"], "Translation")
def __get_headers(self):
""" get the headers for the API CALL"""
return {
"accept": "*/*",
"Host": "services.morfix.com",
"Content-Type": "application/json",
}
def generate_deck(self, path: str) -> str:
"""
Generate the deck in the given path
It crashes if the path it's not valid
"""
deck_filename = self.deck_name.lower().replace(" ", "_")
my_package = genanki.Package(self.my_deck)
# my_package.media_files = self.audio_paths # TODO: Kindle implementation
output_path = os.path.join(path)
if not os.path.exists(output_path):
os.makedirs(output_path)
self.deck_path = os.path.join(output_path, deck_filename + ".apkg")
my_package.write_to_file(self.deck_path)
return self.deck_path
# returns the path to the deck
def __create_model(self):
""" create the card model"""
model_fields = []
for field in [
field for field in self.list_of_fields if field != "Multiple_Meaning"
]:
model_fields.append({"name": field})
self.my_model = genanki.Model(
randrange(1 << 30, 1 << 31),
"DAnkiModel",
fields=model_fields,
templates=[
{
"name": "{Card}",
"qfmt": '<div style="color:blue;text-align:center;font-size:25px"><b>{{Token}}</div></b><br><b>Word:</b> {{Hebrew}}<br> <b>Word class:</b> {{Classification}}',
"afmt": '{{FrontSide}}<hr id="answer"><div style="color:black;text-align:center;font-size:25px"><b>Translation</div></b>{{Translation}}',
},
],
)
def __create_card(self, data: dict) -> NoReturn:
"""
        create a card
        Args:
            data(dict)
                This dict contains all the fields required for the card
                creation, e.g. word, translation, as defined in
                __extract_response in the API call
"""
## must receive a dictionary with each field and it's value
# create a Note
note_fields = []
        # append all fields except Multiple_Meaning, which is only kept for later use
for field in [i for i in self.list_of_fields if i != "Multiple_Meaning"]:
note_fields.append(unicodedata.normalize("NFKC", data[field]))
my_note = genanki.Note(
model=self.my_model,
fields=note_fields,
)
self.my_deck.add_note(my_note)
def __save_card(self) -> str:
""" Create a note for each available processed word """
for key, value in self.df_main_table.items():
self.__create_card(value)
class AnkiTronError(object):
"""
An Error wrapper for AnkiTron
"""
def __init__(self):
self.num_errors = 0
self.translated = False
self.error_list = []
def create_error(self, word: str, error: object, typeE: str):
if not (typeE == "Input" or typeE == "Translation"):
raise TypeError('The error should be "Translation" or "Input"')
self.error_list.append({"word": word, "error": error, "type": typeE})
def input_errors(self):
input_errors = 0
for item in self.error_list:
if item["type"] == "Input":
input_errors += 1
return input_errors
    def translate_errors(self):
        if not self.translated:
            msg = "You must call `.translate()` before accessing `.translate_errors()`."
            raise AssertionError(msg)
        translate_errors = 0
        for item in self.error_list:
            if item["type"] == "Translation":
                translate_errors += 1
        return translate_errors
def number_errors(self):
return len(self.error_list)
def errors(self):
return self.error_list
def __set_translated(self):
self.translated = True
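# Minimal usage sketch of the flow described in the class docstring, kept as
# comments (the word list, deck name and output path are illustrative):
#     deck = AnkiCardOTron(word_list=["שלום"], deck_name="demo_deck")
#     deck.translate()
#     deck.generate_deck("./decks")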
|
AnkiOTron
|
/AnkiOTron-1.2-py3-none-any.whl/AnkiCardOTron/AnkiCardOTron.py
|
AnkiCardOTron.py
|
Anki Server
===========
`Anki <http://ankisrs.net>`_ is a powerful Open Source flashcard
application, which helps you quickly and easily memorize facts over
the long term utilizing a spaced repetition algorithm.
Anki's main form is a desktop application (for Windows, Linux and
MacOS) which can sync to a web version (AnkiWeb) and mobile versions
for Android and iOS.
This is a personal Anki Server, which you can sync against instead of
AnkiWeb.
It also includes a RESTful API, so that you could implement your
own AnkiWeb-like site if you wanted.
It was originally developed to support the flashcard functionality on
`Bibliobird <http://en.bibliobird.com>`_, a web application for
language learning.
Installing the easy way!
------------------------
If you have ``easy_install`` or ``pip`` on your system, you can
simply run::
$ easy_install AnkiServer
Or using ``pip``::
$ pip install AnkiServer
This will give you the latest released version!
However, if you want to try the latest bleeding edge version OR you
want to help with development, you'll need to install from source.
In that case, follow the instructions in the next two sections.
Setting up a virtualenv
-----------------------
If you want to install your Anki Server in an isolated Python
environment using
`virtualenv <https://pypi.python.org/pypi/virtualenv>`_, please
follow these instructions before going on to the next section. If
not, just skip to the "Installing" section below.
There are many reasons for installing into a virtualenv, rather
than globally on your system:
- You can keep the Anki Server's dependencies separate from other
Python applications.
- You don't have permission to install globally on your system
(like on a shared host).
Here are step-by-step instructions for setting up your virtualenv:
1. First, you need to install "virtualenv". If your system has
``easy_install`` or ``pip``, this is just a matter of::
$ easy_install virtualenv
Or using pip::
$ pip install virtualenv
   Or you can use the package manager provided by your OS.
2. Next, create a Python environment for running AnkiServer::
$ virtualenv AnkiServer.env
3. (Optional) Enter the virtualenv to save you on typing::
$ . AnkiServer.env/bin/activate
If you skip step 3, you'll have to type
``AnkiServer.env/bin/python`` instead of ``python`` and
``AnkiServer.env/bin/paster`` instead of ``paster`` in the following
sections.
Also, remember that the environment change in step 3 only lasts as
long as your current terminal session. You'll have to re-enter the
environment if you close that terminal and come back later.
Installing your Anki Server from source
---------------------------------------
1. Install all the dependencies we need using ``easy_install`` or
``pip``::
$ easy_install webob PasteDeploy PasteScript sqlalchemy simplejson
Or using pip::
$ pip install webob PasteDeploy PasteScript sqlalchemy simplejson
   Or you can use the package manager provided by your OS.
2. Download and install libanki. You can find the latest release of
Anki here:
http://code.google.com/p/anki/downloads/list
Look for a \*.tgz file with a Summary of "Anki Source". At the time
of this writing that is anki-2.0.11.tgz.
Download this file and extract.
Then either:
   a. Run 'make install', or
b. Copy the entire directory to /usr/share/anki
3. Make the egg info files (so paster can see our app)::
$ python setup.py egg_info
Configuring and running your Anki Server
----------------------------------------
1. Copy the example.ini to production.ini in your current directory
and edit for your needs.
a. If you installed from source, it'll be at the top-level.
b. If you installed via 'easy_install' or 'pip', you'll find all
the example configuration at
``python_prefix/lib/python2.X/site-packages/AnkiServer-2.X.X-py2.X.egg/examples``
(replacing ``python_prefix`` with the root of your Python and
all the ``X`` with the correct versions). For example, it could
be::
/usr/lib/python2.7/site-packages/AnkiServer-2.0.0a6-py2.7.egg/examples/example.ini
2. Create a user::
$ ./ankiserverctl.py adduser <username>
3. Test the server by starting it in debug mode::
$ ./ankiserverctl.py debug
If the output looks good, you can stop the server by pressing Ctrl-C and start it again in normal mode::
$ ./ankiserverctl.py start
To stop AnkiServer, run::
$ ./ankiserverctl.py stop
Point the Anki desktop program at it
------------------------------------
Unfortunately, there isn't currently any user interface in the Anki
desktop program to point it at your personal sync server instead of
AnkiWeb, so you'll have to write a short "addon".
Create a file like this in your Anki/addons folder called
"mysyncserver.py"::
import anki.sync
anki.sync.SYNC_BASE = 'http://127.0.0.1:27701/'
anki.sync.SYNC_MEDIA_BASE = 'http://127.0.0.1:27701/msync/'
Be sure to change the sync URLs to point at your sync server. The
address ``127.0.0.1`` refers to the local computer.
Restart Anki for your plugin to take effect. Now, every time you sync,
it will be to your personal sync server rather than AnkiWeb.
However, if you just want to switch temporarily, rather than creating
an addon, you can set the ``SYNC_URL`` environment variable when
running from the command-line (on Linux)::
export SYNC_URL=http://127.0.0.1:27701/sync/
./runanki &
Point the mobile apps at it
---------------------------
At the moment, there isn't any way to get AnkiDroid or the Anki iOS
app to point at your personal sync server. :-/
However, there is an issue open on AnkiDroid about it:
- `Option to sync with personal sync server · Issue #1057
<https://github.com/ankidroid/Anki-Android/issues/1057>`_
If you're interested in seeing this feature, please go to this link
and let the maintainers know!
Running with Supervisor
-----------------------
If you want to run your Anki server persistently on a Linux (or
other UNIX-y) server, `Supervisor <http://supervisord.org>`_ is a
great tool to monitor and manage it. It will allow you to start it
when your server boots, restart it if it crashes and easily access
its logs.
1. Install Supervisor on your system. If it's Debian or Ubuntu this
will work::
$ sudo apt-get install supervisor
If you're using a different OS, please try
`these instructions <http://supervisord.org/installing.html>`_.
2. Copy ``supervisor-anki-server.conf`` to ``/etc/supervisor/conf.d/anki-server.conf``::
$ sudo cp supervisor-anki-server.conf /etc/supervisor/conf.d/anki-server.conf
3. Modify ``/etc/supervisor/conf.d/anki-server.conf`` to match your
system and how you setup your Anki Server in the section above.
4. Reload Supervisor's configuration::
$ sudo supervisorctl reload
5. Check the logs from the Anki Server to make sure everything is
fine::
$ sudo supervisorctl tail anki-server
If it's empty, then everything's fine! Otherwise, you'll see an
error message.
Later if you manually want to stop, start or restart it, you can
use::
$ sudo supervisorctl stop anki-server
$ sudo supervisorctl start anki-server
$ sudo supervisorctl restart anki-server
See the `Supervisor documentation <http://supervisord.org>`_ for
more info!
Using with Apache
-----------------
If you're already serving your website via Apache (on port 80) and
want to also allow users to sync against a URL on port 80, you can
forward requests from Apache to the Anki server.
On Bibliobird.com, I have a special anki.bibliobird.com virtual host
which users can sync against. Here is an excerpt from my Apache
conf::
<VirtualHost *:80>
ServerAdmin [email protected]
ServerName anki.bibliobird.com
# The Anki server handles gzip itself!
SetEnv no-gzip 1
<Location />
ProxyPass http://localhost:27701/
ProxyPassReverse http://localhost:27701/
</Location>
</VirtualHost>
It may also be possible to use `mod_wsgi
<http://code.google.com/p/modwsgi/>`_, however, I have no experience
with that.
How to get help
---------------
If you're having any problems installing or using Anki Server, please
post a message on our Google Group:
https://groups.google.com/forum/#!forum/anki-sync-server
Be sure to let us know which operating system and version you're using
and how you intend to use the Anki Server!
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/README.rst
|
README.rst
|
import os
import sys
import signal
import subprocess
import binascii
import getpass
import hashlib
import sqlite3
SERVERCONFIG = "production.ini"
AUTHDBPATH = "auth.db"
PIDPATH = "/tmp/ankiserver.pid"
COLLECTIONPATH = "collections/"
def usage():
print "usage: "+sys.argv[0]+" <command> [<args>]"
print
print "Commands:"
print " start [configfile] - start the server"
print " debug [configfile] - start the server in debug mode"
print " stop - stop the server"
print " adduser <username> - add a new user"
print " deluser <username> - delete a user"
print " lsuser - list users"
print " passwd <username> - change password of a user"
def startsrv(configpath, debug):
if not configpath:
configpath = SERVERCONFIG
# We change to the directory containing the config file
# so that all the paths will be relative to it.
configdir = os.path.dirname(configpath)
if configdir != '':
os.chdir(configdir)
configpath = os.path.basename(configpath)
if debug:
# Start it in the foreground and wait for it to complete.
subprocess.call( ["paster", "serve", configpath], shell=False)
return
devnull = open(os.devnull, "w")
pid = subprocess.Popen( ["paster", "serve", configpath],
stdout=devnull,
stderr=devnull).pid
with open(PIDPATH, "w") as pidfile:
pidfile.write(str(pid))
def stopsrv():
if os.path.isfile(PIDPATH):
try:
with open(PIDPATH) as pidfile:
pid = int(pidfile.read())
os.kill(pid, signal.SIGKILL)
os.remove(PIDPATH)
except Exception, error:
print >>sys.stderr, sys.argv[0]+": Failed to stop server: "+error.message
else:
print >>sys.stderr, sys.argv[0]+": The server is not running"
def adduser(username):
if username:
print "Enter password for "+username+": "
password = getpass.getpass()
salt = binascii.b2a_hex(os.urandom(8))
hash = hashlib.sha256(username+password+salt).hexdigest()+salt
conn = sqlite3.connect(AUTHDBPATH)
cursor = conn.cursor()
cursor.execute( "CREATE TABLE IF NOT EXISTS auth "
"(user VARCHAR PRIMARY KEY, hash VARCHAR)")
cursor.execute("INSERT INTO auth VALUES (?, ?)", (username, hash))
if not os.path.isdir(COLLECTIONPATH+username):
os.makedirs(COLLECTIONPATH+username)
conn.commit()
conn.close()
else:
usage()
def deluser(username):
if username and os.path.isfile(AUTHDBPATH):
conn = sqlite3.connect(AUTHDBPATH)
cursor = conn.cursor()
cursor.execute("DELETE FROM auth WHERE user=?", (username,))
conn.commit()
conn.close()
elif not username:
usage()
else:
print >>sys.stderr, sys.argv[0]+": Database file does not exist"
def lsuser():
conn = sqlite3.connect(AUTHDBPATH)
cursor = conn.cursor()
cursor.execute("SELECT user FROM auth")
row = cursor.fetchone()
while row is not None:
print row[0]
row = cursor.fetchone()
conn.close()
def passwd(username):
if os.path.isfile(AUTHDBPATH):
print "Enter password for "+username+": "
password = getpass.getpass()
salt = binascii.b2a_hex(os.urandom(8))
hash = hashlib.sha256(username+password+salt).hexdigest()+salt
conn = sqlite3.connect(AUTHDBPATH)
cursor = conn.cursor()
cursor.execute("UPDATE auth SET hash=? WHERE user=?", (hash, username))
conn.commit()
conn.close()
else:
print >>sys.stderr, sys.argv[0]+": Database file does not exist"
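# --- Hedged sketch (illustrative only, not used by this script): verifying a
# password against the format stored above. The hash is
# sha256(username + password + salt).hexdigest() + salt, where salt is the
# 16-char hex encoding of 8 random bytes, so the last 16 chars are the salt.
def checkpw(username, password, stored):
    salt = stored[-16:]
    return hashlib.sha256(username + password + salt).hexdigest() + salt == stored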
def main():
argc = len(sys.argv)
exitcode = 0
if argc < 2:
usage()
exitcode = 1
else:
if argc < 3:
sys.argv.append(None)
if sys.argv[1] == "start":
startsrv(sys.argv[2], False)
elif sys.argv[1] == "debug":
startsrv(sys.argv[2], True)
elif sys.argv[1] == "stop":
stopsrv()
elif sys.argv[1] == "adduser":
adduser(sys.argv[2])
elif sys.argv[1] == "deluser":
deluser(sys.argv[2])
elif sys.argv[1] == "lsuser":
lsuser()
elif sys.argv[1] == "passwd":
passwd(sys.argv[2])
else:
usage()
exitcode = 1
sys.exit(exitcode)
if __name__ == "__main__":
main()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/ankiserverctl.py
|
ankiserverctl.py
|
Please see the README file for basic requirements.
In addition to the basic requirements, you also need the PyQt development
tools (specifically pyrcc4 and pyuic4). These are often contained in a
separate package on Linux, such as 'pyqt4-dev-tools' on Debian/Ubuntu. On a Mac
they are part of the PyQt source install.
Windows users, please see the note at the bottom of this file before proceeding.
To use the development version:
$ git clone https://github.com/dae/anki.git
$ cd anki
$ ./tools/build_ui.sh
If you get any errors, you will not be able to proceed, so please return to
the top and check the requirements again.
ALL USERS: Make sure you rebuild the UI every time you git pull, otherwise you
will get errors down the road.
The translations are stored in a bazaar repo for integration with Launchpad's
translation services. If you want to use a language other than English:
$ cd ..
$ mv anki dtop # i18n code expects anki folder to be called dtop
$ bzr clone lp:anki i18n
$ cd i18n
$ ./update-mos.sh
$ cd ../dtop
And now you're ready to run Anki:
$ ./runanki
If you get any errors, please make sure you don't have an older version of
Anki installed in a system location.
Before contributing code, please read the LICENSE file.
If you'd like to contribute translations, please see the translations section
of http://ankisrs.net/docs/manual.html#_contributing
WINDOWS USERS:
I have not tested the build scripts on Windows, so you'll need to solve any
problems you encounter on your own. The easiest way is to use a source
tarball instead of git, as that way you don't need to build the UI yourself.
If you do want to use git, two alternatives have been contributed by users. As
these are not official solutions, I'm afraid we can not provide you with any
support for these.
A powershell script:
https://gist.github.com/vermiceli/108fec65759d19645ee3
Or a way with git bash and perl:
1) Install "git bash".
2) In the tools directory, modify build_ui.sh. Locate the line that reads
"pyuic4 $i -o $py" and alter it to be of the following form:
"<python-path-string>" "<pyuic-path-string>" $i -o $py
These two paths must point to your python executable, and to pyuic.py, on your
system. Typical paths would be:
<python-path> = C:\\Python27\\python.exe
<pyuic-path-string> = C:\\Python27\\Lib\\site-packages\\PyQt4\\uic\\pyuic.py
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/README.development
|
README.development
|
__docformat__ = 'restructuredtext'
import re, sys, threading, time, subprocess, os, signal, errno, atexit
import tempfile, shutil
from oldanki.hooks import addHook, runHook
# Shared utils
##########################################################################
def playFromText(text):
for match in re.findall("\[sound:(.*?)\]", text):
play(match)
def stripSounds(text):
return re.sub("\[sound:.*?\]", "", text)
def hasSound(text):
return re.search("\[sound:.*?\]", text) is not None
##########################################################################
# the amount of noise to cancel
NOISE_AMOUNT = "0.1"
# the amount of amplification
NORM_AMOUNT = "-3"
# the amount of bass
BASS_AMOUNT = "+0"
# the amount to fade at end
FADE_AMOUNT = "0.25"
noiseProfile = ""
processingSrc = "rec.wav"
processingDst = "rec.mp3"
processingChain = []
recFiles = ["rec2.wav", "rec3.wav"]
cmd = ["sox", processingSrc, "rec2.wav"]
processingChain = [
None, # placeholder
["sox", "rec2.wav", "rec3.wav", "norm", NORM_AMOUNT,
"bass", BASS_AMOUNT, "fade", FADE_AMOUNT],
["lame", "rec3.wav", processingDst, "--noreplaygain", "--quiet"],
]
tmpdir = None
# don't show box on windows
if sys.platform == "win32":
si = subprocess.STARTUPINFO()
try:
si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
except:
# python2.7+
si.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW
# tmp dir for non-hashed media
tmpdir = unicode(
tempfile.mkdtemp(prefix="oldanki"), sys.getfilesystemencoding())
else:
si = None
if sys.platform.startswith("darwin"):
# make sure lame, which is installed in /usr/local/bin, is in the path
os.environ['PATH'] += ":" + "/usr/local/bin"
dir = os.path.dirname(os.path.abspath(__file__))
dir = os.path.abspath(dir + "/../../../..")
os.environ['PATH'] += ":" + dir + "/audio"
def retryWait(proc):
# osx throws interrupted system call errors frequently
while 1:
try:
return proc.wait()
except OSError:
continue
# Noise profiles
##########################################################################
def checkForNoiseProfile():
global processingChain
if sys.platform.startswith("darwin"):
# not currently supported
processingChain = [
["lame", "rec.wav", "rec.mp3", "--noreplaygain", "--quiet"]]
else:
cmd = ["sox", processingSrc, "rec2.wav"]
if os.path.exists(noiseProfile):
cmd = cmd + ["noisered", noiseProfile, NOISE_AMOUNT]
processingChain[0] = cmd
def generateNoiseProfile():
try:
os.unlink(noiseProfile)
except OSError:
pass
retryWait(subprocess.Popen(
["sox", processingSrc, recFiles[0], "trim", "1.5", "1.5"],
startupinfo=si))
retryWait(subprocess.Popen(["sox", recFiles[0], recFiles[1],
"noiseprof", noiseProfile],
startupinfo=si))
processingChain[0] = ["sox", processingSrc, "rec2.wav",
"noisered", noiseProfile, NOISE_AMOUNT]
# Mplayer settings
##########################################################################
if sys.platform.startswith("win32"):
mplayerCmd = ["mplayer.exe", "-ao", "win32", "-really-quiet"]
dir = os.path.dirname(os.path.abspath(sys.argv[0]))
os.environ['PATH'] += ";" + dir
os.environ['PATH'] += ";" + dir + "\\..\\win\\top" # for testing
else:
mplayerCmd = ["mplayer", "-really-quiet"]
# Mplayer in slave mode
##########################################################################
mplayerQueue = []
mplayerManager = None
mplayerReader = None
mplayerEvt = threading.Event()
mplayerClear = False
class MplayerReader(threading.Thread):
"Read any debugging info to prevent mplayer from blocking."
def run(self):
while 1:
mplayerEvt.wait()
try:
mplayerManager.mplayer.stdout.read()
except:
pass
class MplayerMonitor(threading.Thread):
def run(self):
global mplayerClear
self.mplayer = None
self.deadPlayers = []
while 1:
mplayerEvt.wait()
if mplayerQueue:
# ensure started
if not self.mplayer:
self.startProcess()
# loop through files to play
while mplayerQueue:
item = mplayerQueue.pop(0)
if mplayerClear:
mplayerClear = False
extra = ""
else:
extra = " 1"
cmd = 'loadfile "%s"%s\n' % (item, extra)
try:
self.mplayer.stdin.write(cmd)
except:
# mplayer has quit and needs restarting
self.deadPlayers.append(self.mplayer)
self.mplayer = None
self.startProcess()
self.mplayer.stdin.write(cmd)
# wait() on finished processes. we don't want to block on the
# wait, so we keep trying each time we're reactivated
def clean(pl):
if pl.poll() is not None:
pl.wait()
return False
else:
return True
self.deadPlayers = [pl for pl in self.deadPlayers if clean(pl)]
mplayerEvt.clear()
def kill(self):
if not self.mplayer:
return
try:
self.mplayer.stdin.write("quit\n")
self.deadPlayers.append(self.mplayer)
except:
pass
self.mplayer = None
def startProcess(self):
try:
cmd = mplayerCmd + ["-slave", "-idle"]
self.mplayer = subprocess.Popen(
cmd, startupinfo=si, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
except OSError:
mplayerEvt.clear()
raise Exception("Audio player not found")
def queueMplayer(path):
ensureMplayerThreads()
while mplayerEvt.isSet():
time.sleep(0.1)
if tmpdir and os.path.exists(path):
# mplayer on windows doesn't like the encoding, so we create a
# temporary file instead. oddly, foreign characters in the dirname
# don't seem to matter.
(fd, name) = tempfile.mkstemp(suffix=os.path.splitext(path)[1],
dir=tmpdir)
f = os.fdopen(fd, "wb")
f.write(open(path, "rb").read())
f.close()
# it wants unix paths, too!
path = name.replace("\\", "/")
path = path.encode(sys.getfilesystemencoding())
else:
path = path.encode("utf-8")
mplayerQueue.append(path)
mplayerEvt.set()
runHook("soundQueued")
def clearMplayerQueue():
global mplayerClear
mplayerClear = True
mplayerEvt.set()
def ensureMplayerThreads():
global mplayerManager, mplayerReader
if not mplayerManager:
mplayerManager = MplayerMonitor()
mplayerManager.daemon = True
mplayerManager.start()
mplayerReader = MplayerReader()
mplayerReader.daemon = True
mplayerReader.start()
def stopMplayer():
if not mplayerManager:
return
mplayerManager.kill()
def onExit():
if tmpdir:
shutil.rmtree(tmpdir)
addHook("deckClosed", stopMplayer)
atexit.register(onExit)
# PyAudio recording
##########################################################################
try:
import pyaudio
import wave
PYAU_FORMAT = pyaudio.paInt16
PYAU_CHANNELS = 1
PYAU_RATE = 44100
PYAU_INPUT_INDEX = None
except:
pass
class _Recorder(object):
def postprocess(self, encode=True):
self.encode = encode
for c in processingChain:
#print c
if not self.encode and c[0] == 'lame':
continue
ret = retryWait(subprocess.Popen(c, startupinfo=si))
if ret:
raise Exception(_("""
Error processing audio.
If you're on Linux and don't have sox 14.1+, you
need to disable normalization. See the wiki.
Command was:\n""") + u" ".join(c))
class PyAudioThreadedRecorder(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.finish = False
def run(self):
chunk = 1024
try:
p = pyaudio.PyAudio()
except NameError:
raise Exception(
"Pyaudio not installed (recording not supported on OSX10.3)")
stream = p.open(format=PYAU_FORMAT,
channels=PYAU_CHANNELS,
rate=PYAU_RATE,
input=True,
input_device_index=PYAU_INPUT_INDEX,
frames_per_buffer=chunk)
all = []
while not self.finish:
try:
data = stream.read(chunk)
except IOError, e:
if e[1] == pyaudio.paInputOverflowed:
data = None
else:
raise
if data:
all.append(data)
stream.close()
p.terminate()
data = ''.join(all)
wf = wave.open(processingSrc, 'wb')
wf.setnchannels(PYAU_CHANNELS)
wf.setsampwidth(p.get_sample_size(PYAU_FORMAT))
wf.setframerate(PYAU_RATE)
wf.writeframes(data)
wf.close()
class PyAudioRecorder(_Recorder):
def __init__(self):
for t in recFiles + [processingSrc, processingDst]:
try:
os.unlink(t)
except OSError:
pass
self.encode = False
def start(self):
self.thread = PyAudioThreadedRecorder()
self.thread.start()
def stop(self):
self.thread.finish = True
self.thread.join()
def file(self):
if self.encode:
tgt = "rec%d.mp3" % time.time()
os.rename(processingDst, tgt)
return tgt
else:
return recFiles[1]
# Audio interface
##########################################################################
_player = queueMplayer
_queueEraser = clearMplayerQueue
def play(path):
_player(path)
def clearAudioQueue():
_queueEraser()
Recorder = PyAudioRecorder
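# --- Hedged usage sketch (not part of oldanki): the playback and recording
# API defined above, assuming mplayer, sox and lame are on the PATH and a
# file "hello.mp3" exists.
#
#   play("hello.mp3")           # queue a file on the mplayer slave process
#   clearAudioQueue()           # drop anything still queued
#
#   r = Recorder()              # i.e. PyAudioRecorder
#   r.start()                   # record from the default input device
#   time.sleep(3)
#   r.stop()
#   r.postprocess(encode=True)  # run the sox/lame processingChain
#   print r.file()              # e.g. "rec1398787200.mp3"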
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/sound.py
|
sound.py
|
__docformat__ = 'restructuredtext'
import time, sys, math, random
from oldanki.db import *
from oldanki.models import CardModel, Model, FieldModel, formatQA
from oldanki.facts import Fact, factsTable, Field
from oldanki.utils import parseTags, findTag, stripHTML, genID, hexifyID
from oldanki.media import updateMediaCount, mediaFiles
MAX_TIMER = 60
# Cards
##########################################################################
cardsTable = Table(
'cards', metadata,
Column('id', Integer, primary_key=True),
Column('factId', Integer, ForeignKey("facts.id"), nullable=False),
Column('cardModelId', Integer, ForeignKey("cardModels.id"), nullable=False),
Column('created', Float, nullable=False, default=time.time),
Column('modified', Float, nullable=False, default=time.time),
Column('tags', UnicodeText, nullable=False, default=u""),
Column('ordinal', Integer, nullable=False),
# cached - changed on fact update
Column('question', UnicodeText, nullable=False, default=u""),
Column('answer', UnicodeText, nullable=False, default=u""),
# default to 'normal' priority;
# this is indexed in deck.py as we need to create a reverse index
Column('priority', Integer, nullable=False, default=2),
Column('interval', Float, nullable=False, default=0),
Column('lastInterval', Float, nullable=False, default=0),
Column('due', Float, nullable=False, default=time.time),
Column('lastDue', Float, nullable=False, default=0),
Column('factor', Float, nullable=False, default=2.5),
Column('lastFactor', Float, nullable=False, default=2.5),
Column('firstAnswered', Float, nullable=False, default=0),
# stats
Column('reps', Integer, nullable=False, default=0),
Column('successive', Integer, nullable=False, default=0),
Column('averageTime', Float, nullable=False, default=0),
Column('reviewTime', Float, nullable=False, default=0),
Column('youngEase0', Integer, nullable=False, default=0),
Column('youngEase1', Integer, nullable=False, default=0),
Column('youngEase2', Integer, nullable=False, default=0),
Column('youngEase3', Integer, nullable=False, default=0),
Column('youngEase4', Integer, nullable=False, default=0),
Column('matureEase0', Integer, nullable=False, default=0),
Column('matureEase1', Integer, nullable=False, default=0),
Column('matureEase2', Integer, nullable=False, default=0),
Column('matureEase3', Integer, nullable=False, default=0),
Column('matureEase4', Integer, nullable=False, default=0),
# this duplicates the above data, because there's no way to map imported
# data to the above
Column('yesCount', Integer, nullable=False, default=0),
Column('noCount', Integer, nullable=False, default=0),
# obsolete
Column('spaceUntil', Float, nullable=False, default=0),
# relativeDelay is reused as type without scheduling (ie, it remains 0-2
# even if card is suspended, etc)
Column('relativeDelay', Float, nullable=False, default=0),
Column('isDue', Boolean, nullable=False, default=0), # obsolete
Column('type', Integer, nullable=False, default=2),
Column('combinedDue', Integer, nullable=False, default=0))
class Card(object):
"A card."
def __init__(self, fact=None, cardModel=None, created=None):
self.tags = u""
self.id = genID()
# new cards start as new & due
self.type = 2
self.relativeDelay = self.type
self.timerStarted = False
self.timerStopped = False
self.modified = time.time()
if created:
self.created = created
self.due = created
else:
self.due = self.modified
self.combinedDue = self.due
if fact:
self.fact = fact
if cardModel:
self.cardModel = cardModel
# for non-orm use
self.cardModelId = cardModel.id
self.ordinal = cardModel.ordinal
def rebuildQA(self, deck, media=True):
# format qa
d = {}
for f in self.fact.model.fieldModels:
d[f.name] = (f.id, self.fact[f.name])
qa = formatQA(None, self.fact.modelId, d, self.splitTags(),
self.cardModel, deck)
# find old media references
files = {}
for type in ("question", "answer"):
for f in mediaFiles(getattr(self, type) or ""):
if f in files:
files[f] -= 1
else:
files[f] = -1
# update q/a
self.question = qa['question']
self.answer = qa['answer']
# determine media delta
for type in ("question", "answer"):
for f in mediaFiles(getattr(self, type)):
if f in files:
files[f] += 1
else:
files[f] = 1
# update media counts if we're attached to deck
# if media:
# for (f, cnt) in files.items():
# updateMediaCount(deck, f, cnt)
self.setModified()
def setModified(self):
self.modified = time.time()
def startTimer(self):
self.timerStarted = time.time()
def stopTimer(self):
self.timerStopped = time.time()
def thinkingTime(self):
return (self.timerStopped or time.time()) - self.timerStarted
def totalTime(self):
return time.time() - self.timerStarted
def genFuzz(self):
"Generate a random offset to spread intervals."
self.fuzz = random.uniform(0.95, 1.05)
def htmlQuestion(self, type="question", align=True):
div = '''<div class="card%s" id="cm%s%s">%s</div>''' % (
type[0], type[0], hexifyID(self.cardModelId),
getattr(self, type))
# add outer div & alignment (with tables due to qt's html handling)
if not align:
return div
attr = type + 'Align'
if getattr(self.cardModel, attr) == 0:
align = "center"
elif getattr(self.cardModel, attr) == 1:
align = "left"
else:
align = "right"
return (("<center><table width=95%%><tr><td align=%s>" % align) +
div + "</td></tr></table></center>")
def htmlAnswer(self, align=True):
return self.htmlQuestion(type="answer", align=align)
def updateStats(self, ease, state):
self.reps += 1
if ease > 1:
self.successive += 1
else:
self.successive = 0
delay = min(self.totalTime(), MAX_TIMER)
self.reviewTime += delay
if self.averageTime:
self.averageTime = (self.averageTime + delay) / 2.0
else:
self.averageTime = delay
# we don't track first answer for cards
if state == "new":
state = "young"
# update ease and yes/no count
attr = state + "Ease%d" % ease
setattr(self, attr, getattr(self, attr) + 1)
if ease < 2:
self.noCount += 1
else:
self.yesCount += 1
if not self.firstAnswered:
self.firstAnswered = time.time()
self.setModified()
def splitTags(self):
return (self.fact.tags, self.fact.model.tags, self.cardModel.name)
def allTags(self):
"Non-canonified string of all tags."
return (self.fact.tags + "," +
self.fact.model.tags)
def hasTag(self, tag):
return findTag(tag, parseTags(self.allTags()))
def fromDB(self, s, id):
r = s.first("""select
id, factId, cardModelId, created, modified, tags, ordinal, question, answer,
priority, interval, lastInterval, due, lastDue, factor,
lastFactor, firstAnswered, reps, successive, averageTime, reviewTime,
youngEase0, youngEase1, youngEase2, youngEase3, youngEase4,
matureEase0, matureEase1, matureEase2, matureEase3, matureEase4,
yesCount, noCount, spaceUntil, isDue, type, combinedDue
from cards where id = :id""", id=id)
if not r:
return
(self.id,
self.factId,
self.cardModelId,
self.created,
self.modified,
self.tags,
self.ordinal,
self.question,
self.answer,
self.priority,
self.interval,
self.lastInterval,
self.due,
self.lastDue,
self.factor,
self.lastFactor,
self.firstAnswered,
self.reps,
self.successive,
self.averageTime,
self.reviewTime,
self.youngEase0,
self.youngEase1,
self.youngEase2,
self.youngEase3,
self.youngEase4,
self.matureEase0,
self.matureEase1,
self.matureEase2,
self.matureEase3,
self.matureEase4,
self.yesCount,
self.noCount,
self.spaceUntil,
self.isDue,
self.type,
self.combinedDue) = r
return True
def toDB(self, s):
"Write card to DB."
s.execute("""update cards set
modified=:modified,
tags=:tags,
interval=:interval,
lastInterval=:lastInterval,
due=:due,
lastDue=:lastDue,
factor=:factor,
lastFactor=:lastFactor,
firstAnswered=:firstAnswered,
reps=:reps,
successive=:successive,
averageTime=:averageTime,
reviewTime=:reviewTime,
youngEase0=:youngEase0,
youngEase1=:youngEase1,
youngEase2=:youngEase2,
youngEase3=:youngEase3,
youngEase4=:youngEase4,
matureEase0=:matureEase0,
matureEase1=:matureEase1,
matureEase2=:matureEase2,
matureEase3=:matureEase3,
matureEase4=:matureEase4,
yesCount=:yesCount,
noCount=:noCount,
spaceUntil = :spaceUntil,
isDue = 0,
type = :type,
combinedDue = :combinedDue,
relativeDelay = :relativeDelay,
priority = :priority
where id=:id""", self.__dict__)
mapper(Card, cardsTable, properties={
'cardModel': relation(CardModel),
'fact': relation(Fact, backref="cards", primaryjoin=
cardsTable.c.factId == factsTable.c.id),
})
mapper(Fact, factsTable, properties={
'model': relation(Model),
'fields': relation(Field, backref="fact", order_by=Field.ordinal),
})
# Card deletions
##########################################################################
cardsDeletedTable = Table(
'cardsDeleted', metadata,
Column('cardId', Integer, ForeignKey("cards.id"),
nullable=False),
Column('deletedTime', Float, nullable=False))
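# --- Hedged usage sketch (not part of oldanki): round-tripping a card through
# the raw-SQL helpers above, assuming `s` is a deck's session wrapper and
# `cid` is an existing card id.
#
#   card = Card()
#   if card.fromDB(s, cid):
#       card.startTimer()
#       # ... user reviews the card ...
#       card.stopTimer()
#       card.updateStats(ease=3, state="young")
#       card.toDB(s)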
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/cards.py
|
cards.py
|
__docformat__ = 'restructuredtext'
import os, sys, time
import oldanki.stats
from oldanki.lang import _
import datetime
#colours for graphs
dueYoungC = "#ffb380"
dueMatureC = "#ff5555"
dueCumulC = "#ff8080"
reviewNewC = "#80ccff"
reviewYoungC = "#3377ff"
reviewMatureC = "#0000ff"
reviewTimeC = "#0fcaff"
easesNewC = "#80b3ff"
easesYoungC = "#5555ff"
easesMatureC = "#0f5aff"
addedC = "#b3ff80"
firstC = "#b380ff"
intervC = "#80e5ff"
# support frozen distribs
if sys.platform.startswith("darwin"):
try:
del os.environ['MATPLOTLIBDATA']
except:
pass
try:
from matplotlib.figure import Figure
except UnicodeEncodeError:
# haven't tracked down the cause of this yet, but reloading fixes it
try:
from matplotlib.figure import Figure
except ImportError:
pass
except ImportError:
pass
def graphsAvailable():
return 'matplotlib' in sys.modules
class DeckGraphs(object):
def __init__(self, deck, width=8, height=3, dpi=75, selective=True):
self.deck = deck
self.stats = None
self.width = width
self.height = height
self.dpi = dpi
self.selective = selective
    def calcStats(self):
if not self.stats:
days = {}
daysYoung = {}
daysMature = {}
months = {}
next = {}
lowestInDay = 0
self.endOfDay = self.deck.failedCutoff
t = time.time()
young = """
select interval, combinedDue from cards c
where relativeDelay between 0 and 1 and type >= 0 and interval <= 21"""
mature = """
select interval, combinedDue
from cards c where relativeDelay = 1 and type >= 0 and interval > 21"""
if self.selective:
young = self.deck._cardLimit("revActive", "revInactive",
young)
mature = self.deck._cardLimit("revActive", "revInactive",
mature)
young = self.deck.s.all(young)
mature = self.deck.s.all(mature)
for (src, dest) in [(young, daysYoung),
(mature, daysMature)]:
for (interval, due) in src:
day=int(round(interval))
days[day] = days.get(day, 0) + 1
indays = int(((due - self.endOfDay) / 86400.0) + 1)
next[indays] = next.get(indays, 0) + 1 # type-agnostic stats
dest[indays] = dest.get(indays, 0) + 1 # type-specific stats
if indays < lowestInDay:
lowestInDay = indays
self.stats = {}
self.stats['next'] = next
self.stats['days'] = days
self.stats['daysByType'] = {'young': daysYoung,
'mature': daysMature}
self.stats['months'] = months
self.stats['lowestInDay'] = lowestInDay
dayReps = self.deck.s.all("""
select day,
matureEase0+matureEase1+matureEase2+matureEase3+matureEase4 as matureReps,
reps-(newEase0+newEase1+newEase2+newEase3+newEase4) as combinedYoungReps,
reps as combinedNewReps
from stats
where type = 1""")
dayTimes = self.deck.s.all("""
select day, reviewTime as reviewTime
from stats
where type = 1""")
todaydt = self.deck._dailyStats.day
for dest, source in [("dayRepsNew", "combinedNewReps"),
("dayRepsYoung", "combinedYoungReps"),
("dayRepsMature", "matureReps")]:
self.stats[dest] = dict(
map(lambda dr: (-(todaydt -datetime.date(
*(int(x)for x in dr["day"].split("-")))).days, dr[source]), dayReps))
self.stats['dayTimes'] = dict(
map(lambda dr: (-(todaydt -datetime.date(
*(int(x)for x in dr["day"].split("-")))).days, dr["reviewTime"]/60.0), dayTimes))
def nextDue(self, days=30):
self.calcStats()
fig = Figure(figsize=(self.width, self.height), dpi=self.dpi)
graph = fig.add_subplot(111)
dayslists = [self.stats['next'], self.stats['daysByType']['mature']]
for dayslist in dayslists:
self.addMissing(dayslist, self.stats['lowestInDay'], days)
argl = []
for dayslist in dayslists:
dl = [x for x in dayslist.items() if x[0] <= days]
argl.extend(list(self.unzip(dl)))
self.varGraph(graph, days, [dueYoungC, dueMatureC], *argl)
cheat = fig.add_subplot(111)
b1 = cheat.bar(0, 0, color = dueYoungC)
b2 = cheat.bar(1, 0, color = dueMatureC)
cheat.legend([b1, b2], [
"Young",
"Mature"], loc='upper right')
graph.set_xlim(xmin=self.stats['lowestInDay'], xmax=days+1)
graph.set_xlabel("Day (0 = today)")
graph.set_ylabel("Cards Due")
return fig
def workDone(self, days=30):
self.calcStats()
for type in ["dayRepsNew", "dayRepsYoung", "dayRepsMature"]:
self.addMissing(self.stats[type], -days, 0)
fig = Figure(figsize=(self.width, self.height), dpi=self.dpi)
graph = fig.add_subplot(111)
args = sum((self.unzip(self.stats[type].items(), limit=days, reverseLimit=True) for type in ["dayRepsMature", "dayRepsYoung", "dayRepsNew"][::-1]), [])
self.varGraph(graph, days, [reviewNewC, reviewYoungC, reviewMatureC], *args)
cheat = fig.add_subplot(111)
b1 = cheat.bar(-3, 0, color = reviewNewC)
b2 = cheat.bar(-4, 0, color = reviewYoungC)
b3 = cheat.bar(-5, 0, color = reviewMatureC)
cheat.legend([b1, b2, b3], [
"New",
"Young",
"Mature"], loc='upper left')
graph.set_xlim(xmin=-days+1, xmax=1)
graph.set_ylim(ymax=max(max(a for a in args[1::2])) + 10)
graph.set_xlabel("Day (0 = today)")
graph.set_ylabel("Cards Answered")
return fig
def timeSpent(self, days=30):
self.calcStats()
fig = Figure(figsize=(self.width, self.height), dpi=self.dpi)
times = self.stats['dayTimes']
self.addMissing(times, -days+1, 0)
times = self.unzip([(day,y) for (day,y) in times.items()
if day + days >= 0])
graph = fig.add_subplot(111)
self.varGraph(graph, days, reviewTimeC, *times)
graph.set_xlim(xmin=-days+1, xmax=1)
graph.set_ylim(ymax=max(a for a in times[1]) + 0.1)
graph.set_xlabel("Day (0 = today)")
graph.set_ylabel("Minutes")
return fig
def cumulativeDue(self, days=30):
self.calcStats()
fig = Figure(figsize=(self.width, self.height), dpi=self.dpi)
graph = fig.add_subplot(111)
self.addMissing(self.stats['next'], 0, days-1)
dl = [x for x in self.stats['next'].items() if x[0] <= days]
(x, y) = self.unzip(dl)
count=0
y = list(y)
for i in range(len(x)):
count = count + y[i]
if i == 0:
continue
y[i] = count
if x[i] > days:
break
self._filledGraph(graph, days, dueCumulC, 1, x, y)
graph.set_xlim(xmin=self.stats['lowestInDay'], xmax=days-1)
graph.set_ylim(ymax=graph.get_ylim()[1]+10)
graph.set_xlabel("Day (0 = today)")
graph.set_ylabel("Cards Due")
return fig
def intervalPeriod(self, days=30):
self.calcStats()
fig = Figure(figsize=(self.width, self.height), dpi=self.dpi)
ints = self.stats['days']
self.addMissing(ints, 0, days)
intervals = self.unzip(ints.items(), limit=days)
graph = fig.add_subplot(111)
self.varGraph(graph, days, intervC, *intervals)
graph.set_xlim(xmin=0, xmax=days+1)
graph.set_xlabel("Card Interval")
graph.set_ylabel("Number of Cards")
return fig
def addedRecently(self, numdays=30, attr='created'):
self.calcStats()
days = {}
fig = Figure(figsize=(self.width, self.height), dpi=self.dpi)
limit = self.endOfDay - (numdays) * 86400
res = self.deck.s.column0("select %s from cards where %s >= %f" %
(attr, attr, limit))
for r in res:
d = int((r - self.endOfDay) / 86400.0)
days[d] = days.get(d, 0) + 1
self.addMissing(days, -numdays+1, 0)
graph = fig.add_subplot(111)
intervals = self.unzip(days.items())
if attr == 'created':
colour = addedC
else:
colour = firstC
self.varGraph(graph, numdays, colour, *intervals)
graph.set_xlim(xmin=-numdays+1, xmax=1)
graph.set_xlabel("Day (0 = today)")
if attr == 'created':
graph.set_ylabel("Cards Added")
else:
graph.set_ylabel("Cards First Answered")
return fig
def addMissing(self, dic, min, max):
for i in range(min, max+1):
if not i in dic:
dic[i] = 0
def unzip(self, tuples, fillFix=True, limit=None, reverseLimit=False):
tuples.sort(cmp=lambda x,y: cmp(x[0], y[0]))
if limit:
if reverseLimit:
tuples = tuples[-limit:]
else:
tuples = tuples[:limit+1]
new = zip(*tuples)
return new
def varGraph(self, graph, days, colours=["b"], *args):
if len(args[0]) < 120:
return self.barGraph(graph, days, colours, *args)
else:
return self.filledGraph(graph, days, colours, *args)
def filledGraph(self, graph, days, colours=["b"], *args):
self._filledGraph(graph, days, colours, 0, *args)
def _filledGraph(self, graph, days, colours, lw, *args):
if isinstance(colours, str):
colours = [colours]
for triplet in [(args[n], args[n + 1], colours[n / 2]) for n in range(0, len(args), 2)]:
x = list(triplet[0])
y = list(triplet[1])
c = triplet[2]
lowest = 99999
highest = -lowest
for i in range(len(x)):
if x[i] < lowest:
lowest = x[i]
if x[i] > highest:
highest = x[i]
# ensure the filled area reaches the bottom
x.insert(0, lowest - 1)
y.insert(0, 0)
x.append(highest + 1)
y.append(0)
# plot
graph.fill(x, y, c, lw=lw)
graph.grid(True)
graph.set_ylim(ymin=0, ymax=max(2, graph.get_ylim()[1]))
def barGraph(self, graph, days, colours, *args):
if isinstance(colours, str):
colours = [colours]
lim = None
for triplet in [(args[n], args[n + 1], colours[n / 2]) for n in range(0, len(args), 2)]:
x = list(triplet[0])
y = list(triplet[1])
c = triplet[2]
lw = 0
if lim is None:
lim = (x[0], x[-1])
length = (lim[1] - lim[0])
if len(args) > 4:
if length <= 30:
lw = 1
else:
if length <= 90:
lw = 1
lowest = 99999
highest = -lowest
for i in range(len(x)):
if x[i] < lowest:
lowest = x[i]
if x[i] > highest:
highest = x[i]
graph.bar(x, y, color=c, width=1, linewidth=lw)
graph.grid(True)
graph.set_ylim(ymin=0, ymax=max(2, graph.get_ylim()[1]))
import numpy as np
if length > 10:
step = length / 10.0
# python's range() won't accept float step args, so we do it manually
if lim[0] < 0:
ticks = [int(lim[1] - step * x) for x in range(10)]
else:
ticks = [int(lim[0] + step * x) for x in range(10)]
else:
ticks = list(xrange(lim[0], lim[1]+1))
graph.set_xticks(np.array(ticks) + 0.5)
graph.set_xticklabels([str(int(x)) for x in ticks])
for tick in graph.xaxis.get_major_ticks():
tick.tick1On = False
tick.tick2On = False
def easeBars(self):
fig = Figure(figsize=(3, 3), dpi=self.dpi)
graph = fig.add_subplot(111)
types = ("new", "young", "mature")
enum = 5
offset = 0
arrsize = 16
arr = [0] * arrsize
n = 0
colours = [easesNewC, easesYoungC, easesMatureC]
bars = []
gs = oldanki.stats.globalStats(self.deck)
for type in types:
total = (getattr(gs, type + "Ease0") +
getattr(gs, type + "Ease1") +
getattr(gs, type + "Ease2") +
getattr(gs, type + "Ease3") +
getattr(gs, type + "Ease4"))
setattr(gs, type + "Ease1", getattr(gs, type + "Ease0") +
getattr(gs, type + "Ease1"))
setattr(gs, type + "Ease0", -1)
for e in range(1, enum):
try:
arr[e+offset] = (getattr(gs, type + "Ease%d" % e)
/ float(total)) * 100 + 1
except ZeroDivisionError:
arr[e+offset] = 0
bars.append(graph.bar(range(arrsize), arr, width=1.0,
color=colours[n], align='center'))
arr = [0] * arrsize
offset += 5
n += 1
x = ([""] + [str(n) for n in range(1, enum)]) * 3
graph.legend([p[0] for p in bars], ("New",
"Young",
"Mature"),
'upper left')
graph.set_ylim(ymax=100)
graph.set_xlim(xmax=15)
graph.set_xticks(range(arrsize))
graph.set_xticklabels(x)
graph.set_ylabel("% of Answers")
graph.set_xlabel("Answer Buttons")
graph.grid(True)
return fig
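# --- Hedged usage sketch (not part of oldanki): rendering one of the figures
# above to a PNG with matplotlib's Agg backend, assuming `deck` is an open
# oldanki deck and matplotlib is importable.
#
#   from matplotlib.backends.backend_agg import FigureCanvasAgg
#   g = DeckGraphs(deck)
#   fig = g.nextDue(days=30)
#   FigureCanvasAgg(fig)        # attach a canvas so the figure can draw
#   fig.savefig("next_due.png", dpi=g.dpi)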
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/graphs.py
|
graphs.py
|
__docformat__ = 'restructuredtext'
import re, tempfile, os, sys, shutil, cgi, subprocess
from oldanki.utils import genID, checksum, call
from oldanki.hooks import addHook
from htmlentitydefs import entitydefs
from oldanki.lang import _
latexDviPngCmd = ["dvipng", "-D", "200", "-T", "tight"]
regexps = {
"standard": re.compile(r"\[latex\](.+?)\[/latex\]", re.DOTALL | re.IGNORECASE),
"expression": re.compile(r"\[\$\](.+?)\[/\$\]", re.DOTALL | re.IGNORECASE),
"math": re.compile(r"\[\$\$\](.+?)\[/\$\$\]", re.DOTALL | re.IGNORECASE),
}
tmpdir = tempfile.mkdtemp(prefix="oldanki")
# add standard tex install location to osx
if sys.platform == "darwin":
os.environ['PATH'] += ":/usr/texbin"
def renderLatex(deck, text, build=True):
"Convert TEXT with embedded latex tags to image links."
for match in regexps['standard'].finditer(text):
text = text.replace(match.group(), imgLink(deck, match.group(1),
build))
for match in regexps['expression'].finditer(text):
text = text.replace(match.group(), imgLink(
deck, "$" + match.group(1) + "$", build))
for match in regexps['math'].finditer(text):
text = text.replace(match.group(), imgLink(
deck,
"\\begin{displaymath}" + match.group(1) + "\\end{displaymath}",
build))
return text
def stripLatex(text):
for match in regexps['standard'].finditer(text):
text = text.replace(match.group(), "")
for match in regexps['expression'].finditer(text):
text = text.replace(match.group(), "")
for match in regexps['math'].finditer(text):
text = text.replace(match.group(), "")
return text
def latexImgFile(deck, latexCode):
key = checksum(latexCode)
return "latex-%s.png" % key
def mungeLatex(deck, latex):
"Convert entities, fix newlines, convert to utf8, and wrap pre/postamble."
for match in re.compile("&([a-z]+);", re.IGNORECASE).finditer(latex):
if match.group(1) in entitydefs:
latex = latex.replace(match.group(), entitydefs[match.group(1)])
latex = re.sub("<br( /)?>", "\n", latex)
latex = (deck.getVar("latexPre") + "\n" +
latex + "\n" +
deck.getVar("latexPost"))
latex = latex.encode("utf-8")
return latex
def buildImg(deck, latex):
log = open(os.path.join(tmpdir, "latex_log.txt"), "w+")
texpath = os.path.join(tmpdir, "tmp.tex")
texfile = file(texpath, "w")
texfile.write(latex)
texfile.close()
# make sure we have a valid mediaDir
mdir = deck.mediaDir(create=True)
oldcwd = os.getcwd()
if sys.platform == "win32":
si = subprocess.STARTUPINFO()
try:
si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
except:
si.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW
else:
si = None
try:
os.chdir(tmpdir)
def errmsg(type):
msg = _("Error executing %s.\n") % type
try:
log = open(os.path.join(tmpdir, "latex_log.txt")).read()
msg += "<small><pre>" + cgi.escape(log) + "</pre></small>"
except:
msg += _("Have you installed latex and dvipng?")
pass
return msg
if call(["latex", "-interaction=nonstopmode",
"tmp.tex"], stdout=log, stderr=log, startupinfo=si):
return (False, errmsg("latex"))
if call(latexDviPngCmd + ["tmp.dvi", "-o", "tmp.png"],
stdout=log, stderr=log, startupinfo=si):
return (False, errmsg("dvipng"))
# add to media
target = latexImgFile(deck, latex)
shutil.copy2(os.path.join(tmpdir, "tmp.png"),
os.path.join(mdir, target))
return (True, target)
finally:
os.chdir(oldcwd)
def imageForLatex(deck, latex, build=True):
"Return an image that represents 'latex', building if necessary."
imageFile = latexImgFile(deck, latex)
ok = True
if build and (not imageFile or not os.path.exists(imageFile)):
(ok, imageFile) = buildImg(deck, latex)
if not ok:
return (False, imageFile)
return (True, imageFile)
def imgLink(deck, latex, build=True):
"Parse LATEX and return a HTML image representing the output."
munged = mungeLatex(deck, latex)
(ok, img) = imageForLatex(deck, munged, build)
if ok:
return '<img src="%s" alt="%s">' % (img, latex)
else:
return img
def formatQA(html, type, cid, mid, fact, tags, cm, deck):
return renderLatex(deck, html)
# setup q/a filter
addHook("formatQA", formatQA)
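# --- Hedged usage sketch (not part of oldanki): the three tag styles that
# renderLatex() recognises, assuming `deck` carries latexPre/latexPost vars
# and latex + dvipng are installed.
#
#   renderLatex(deck, "[latex]\\LaTeX[/latex]")  # raw LaTeX body
#   renderLatex(deck, "[$]x^2[/$]")              # inline math, wrapped in $...$
#   renderLatex(deck, "[$$]\\int f[/$$]")        # display math environment
#   # each tag is replaced by an <img src="latex-<checksum>.png"> link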
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/latex.py
|
latex.py
|
__docformat__ = 'restructuredtext'
# we track statistics over the life of the deck, and per-day
STATS_LIFE = 0
STATS_DAY = 1
import unicodedata, time, sys, os, datetime
import oldanki, oldanki.utils
from datetime import date
from oldanki.db import *
from oldanki.lang import _, ngettext
from oldanki.utils import canonifyTags, ids2str
from oldanki.hooks import runFilter
# Tracking stats on the DB
##########################################################################
statsTable = Table(
'stats', metadata,
Column('id', Integer, primary_key=True),
Column('type', Integer, nullable=False),
Column('day', Date, nullable=False),
Column('reps', Integer, nullable=False, default=0),
Column('averageTime', Float, nullable=False, default=0),
Column('reviewTime', Float, nullable=False, default=0),
# next two columns no longer used
Column('distractedTime', Float, nullable=False, default=0),
Column('distractedReps', Integer, nullable=False, default=0),
Column('newEase0', Integer, nullable=False, default=0),
Column('newEase1', Integer, nullable=False, default=0),
Column('newEase2', Integer, nullable=False, default=0),
Column('newEase3', Integer, nullable=False, default=0),
Column('newEase4', Integer, nullable=False, default=0),
Column('youngEase0', Integer, nullable=False, default=0),
Column('youngEase1', Integer, nullable=False, default=0),
Column('youngEase2', Integer, nullable=False, default=0),
Column('youngEase3', Integer, nullable=False, default=0),
Column('youngEase4', Integer, nullable=False, default=0),
Column('matureEase0', Integer, nullable=False, default=0),
Column('matureEase1', Integer, nullable=False, default=0),
Column('matureEase2', Integer, nullable=False, default=0),
Column('matureEase3', Integer, nullable=False, default=0),
Column('matureEase4', Integer, nullable=False, default=0))
class Stats(object):
def __init__(self):
self.day = None
self.reps = 0
self.averageTime = 0
self.reviewTime = 0
self.distractedTime = 0
self.distractedReps = 0
self.newEase0 = 0
self.newEase1 = 0
self.newEase2 = 0
self.newEase3 = 0
self.newEase4 = 0
self.youngEase0 = 0
self.youngEase1 = 0
self.youngEase2 = 0
self.youngEase3 = 0
self.youngEase4 = 0
self.matureEase0 = 0
self.matureEase1 = 0
self.matureEase2 = 0
self.matureEase3 = 0
self.matureEase4 = 0
def fromDB(self, s, id):
r = s.first("select * from stats where id = :id", id=id)
(self.id,
self.type,
self.day,
self.reps,
self.averageTime,
self.reviewTime,
self.distractedTime,
self.distractedReps,
self.newEase0,
self.newEase1,
self.newEase2,
self.newEase3,
self.newEase4,
self.youngEase0,
self.youngEase1,
self.youngEase2,
self.youngEase3,
self.youngEase4,
self.matureEase0,
self.matureEase1,
self.matureEase2,
self.matureEase3,
self.matureEase4) = r
self.day = datetime.date(*[int(i) for i in self.day.split("-")])
def create(self, s, type, day):
self.type = type
self.day = day
s.execute("""insert into stats
(type, day, reps, averageTime, reviewTime, distractedTime, distractedReps,
newEase0, newEase1, newEase2, newEase3, newEase4, youngEase0, youngEase1,
youngEase2, youngEase3, youngEase4, matureEase0, matureEase1, matureEase2,
matureEase3, matureEase4) values (:type, :day, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)""", self.__dict__)
self.id = s.scalar(
"select id from stats where type = :type and day = :day",
type=type, day=day)
def toDB(self, s):
assert self.id
s.execute("""update stats set
type=:type,
day=:day,
reps=:reps,
averageTime=:averageTime,
reviewTime=:reviewTime,
newEase0=:newEase0,
newEase1=:newEase1,
newEase2=:newEase2,
newEase3=:newEase3,
newEase4=:newEase4,
youngEase0=:youngEase0,
youngEase1=:youngEase1,
youngEase2=:youngEase2,
youngEase3=:youngEase3,
youngEase4=:youngEase4,
matureEase0=:matureEase0,
matureEase1=:matureEase1,
matureEase2=:matureEase2,
matureEase3=:matureEase3,
matureEase4=:matureEase4
where id = :id""", self.__dict__)
mapper(Stats, statsTable)
def genToday(deck):
return datetime.datetime.utcfromtimestamp(
time.time() - deck.utcOffset).date()
def updateAllStats(s, gs, ds, card, ease, oldState):
"Update global and daily statistics."
updateStats(s, gs, card, ease, oldState)
updateStats(s, ds, card, ease, oldState)
def updateStats(s, stats, card, ease, oldState):
stats.reps += 1
delay = card.totalTime()
if delay >= 60:
stats.reviewTime += 60
else:
stats.reviewTime += delay
stats.averageTime = (
stats.reviewTime / float(stats.reps))
# update eases
attr = oldState + "Ease%d" % ease
setattr(stats, attr, getattr(stats, attr) + 1)
stats.toDB(s)
def globalStats(deck):
s = deck.s
type = STATS_LIFE
today = genToday(deck)
id = s.scalar("select id from stats where type = :type",
type=type)
stats = Stats()
if id:
stats.fromDB(s, id)
return stats
else:
stats.create(s, type, today)
stats.type = type
return stats
def dailyStats(deck):
s = deck.s
type = STATS_DAY
today = genToday(deck)
id = s.scalar("select id from stats where type = :type and day = :day",
type=type, day=today)
stats = Stats()
if id:
stats.fromDB(s, id)
return stats
else:
stats.create(s, type, today)
return stats
def summarizeStats(stats, pre=""):
"Generate percentages and total counts for STATS. Optionally prefix."
cardTypes = ("new", "young", "mature")
h = {}
# total counts
###############
for type in cardTypes:
# total yes/no for type, eg. gNewYes
h[pre + type.capitalize() + "No"] = (getattr(stats, type + "Ease0") +
getattr(stats, type + "Ease1"))
h[pre + type.capitalize() + "Yes"] = (getattr(stats, type + "Ease2") +
getattr(stats, type + "Ease3") +
getattr(stats, type + "Ease4"))
# total for type, eg. gNewTotal
h[pre + type.capitalize() + "Total"] = (
h[pre + type.capitalize() + "No"] +
h[pre + type.capitalize() + "Yes"])
# total yes/no, eg. gYesTotal
for answer in ("yes", "no"):
num = 0
for type in cardTypes:
num += h[pre + type.capitalize() + answer.capitalize()]
h[pre + answer.capitalize() + "Total"] = num
# total over all, eg. gTotal
num = 0
for type in cardTypes:
num += h[pre + type.capitalize() + "Total"]
h[pre + "Total"] = num
# percentages
##############
for type in cardTypes:
# total yes/no % by type, eg. gNewYes%
for answer in ("yes", "no"):
setPercentage(h, pre + type.capitalize() + answer.capitalize(),
pre + type.capitalize())
for answer in ("yes", "no"):
# total yes/no, eg. gYesTotal%
setPercentage(h, pre + answer.capitalize() + "Total", pre)
h[pre + 'AverageTime'] = stats.averageTime
h[pre + 'ReviewTime'] = stats.reviewTime
return h
def setPercentage(h, a, b):
try:
h[a + "%"] = (h[a] / float(h[b + "Total"])) * 100
except ZeroDivisionError:
h[a + "%"] = 0
def getStats(s, gs, ds):
"Return a handy dictionary exposing a number of internal stats."
h = {}
h.update(summarizeStats(gs, "g"))
h.update(summarizeStats(ds, "d"))
return h
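# --- Hedged usage sketch (not part of oldanki): pulling the combined
# global/daily dictionary, assuming `deck` is an open deck. Keys follow the
# pre + Type + Yes/No/Total pattern built in summarizeStats(), plus "%"
# variants for percentages and AverageTime/ReviewTime entries.
#
#   h = getStats(deck.s, globalStats(deck), dailyStats(deck))
#   print h["gTotal"], h["gYesTotal%"], h["dAverageTime"]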
# Card stats
##########################################################################
class CardStats(object):
def __init__(self, deck, card):
self.deck = deck
self.card = card
def report(self):
c = self.card
fmt = oldanki.utils.fmtTimeSpan
fmtFloat = oldanki.utils.fmtFloat
self.txt = "<table>"
self.addLine(_("Added"), self.strTime(c.created))
if c.firstAnswered:
self.addLine(_("First Review"), self.strTime(c.firstAnswered))
self.addLine(_("Changed"), self.strTime(c.modified))
if c.reps:
next = time.time() - c.combinedDue
if next > 0:
next = _("%s ago") % fmt(next)
else:
next = _("in %s") % fmt(abs(next))
self.addLine(_("Due"), next)
self.addLine(_("Interval"), fmt(c.interval * 86400))
self.addLine(_("Ease"), fmtFloat(c.factor, point=2))
if c.lastDue:
last = _("%s ago") % fmt(time.time() - c.lastDue)
self.addLine(_("Last Due"), last)
if c.interval != c.lastInterval:
# don't show the last interval if it hasn't been updated yet
self.addLine(_("Last Interval"), fmt(c.lastInterval * 86400))
self.addLine(_("Last Ease"), fmtFloat(c.lastFactor, point=2))
if c.reps:
self.addLine(_("Reviews"), "%d/%d (s=%d)" % (
c.yesCount, c.reps, c.successive))
avg = fmt(c.averageTime, point=2)
self.addLine(_("Average Time"),avg)
total = fmt(c.reviewTime, point=2)
self.addLine(_("Total Time"), total)
self.addLine(_("Model Tags"), c.fact.model.tags)
self.addLine(_("Card Template") + " "*5, c.cardModel.name)
self.txt += "</table>"
return self.txt
def addLine(self, k, v):
self.txt += "<tr><td><b>%s<b></td><td>%s</td></tr>" % (k, v)
def strTime(self, tm):
s = oldanki.utils.fmtTimeSpan(time.time() - tm)
return _("%s ago") % s
# Deck stats (specific to the 'sched' scheduler)
##########################################################################
class DeckStats(object):
def __init__(self, deck):
self.deck = deck
def report(self):
"Return an HTML string with a report."
fmtPerc = oldanki.utils.fmtPercentage
fmtFloat = oldanki.utils.fmtFloat
if self.deck.isEmpty():
return _("Please add some cards first.") + "<p/>"
d = self.deck
html="<h1>" + _("Deck Statistics") + "</h1>"
html += _("Deck created: <b>%s</b> ago<br>") % self.createdTimeStr()
total = d.cardCount
new = d.newCountAll()
young = d.youngCardCount()
old = d.matureCardCount()
newP = new / float(total) * 100
youngP = young / float(total) * 100
oldP = old / float(total) * 100
stats = d.getStats()
(stats["new"], stats["newP"]) = (new, newP)
(stats["old"], stats["oldP"]) = (old, oldP)
(stats["young"], stats["youngP"]) = (young, youngP)
html += _("Total number of cards:") + " <b>%d</b><br>" % total
html += _("Total number of facts:") + " <b>%d</b><br><br>" % d.factCount
html += "<b>" + _("Card Maturity") + "</b><br>"
html += _("Mature cards: <!--card count-->") + " <b>%(old)d</b> (%(oldP)s)<br>" % {
'old': stats['old'], 'oldP' : fmtPerc(stats['oldP'])}
html += _("Young cards: <!--card count-->") + " <b>%(young)d</b> (%(youngP)s)<br>" % {
'young': stats['young'], 'youngP' : fmtPerc(stats['youngP'])}
html += _("Unseen cards:") + " <b>%(new)d</b> (%(newP)s)<br>" % {
'new': stats['new'], 'newP' : fmtPerc(stats['newP'])}
avgInt = self.getAverageInterval()
if avgInt:
html += _("Average interval: ") + ("<b>%s</b> ") % fmtFloat(avgInt) + _("days")
html += "<br>"
html += "<br>"
html += "<b>" + _("Correct Answers") + "</b><br>"
html += _("Mature cards: <!--correct answers-->") + " <b>" + fmtPerc(stats['gMatureYes%']) + (
"</b> " + _("(%(partOf)d of %(totalSum)d)") % {
'partOf' : stats['gMatureYes'],
'totalSum' : stats['gMatureTotal'] } + "<br>")
html += _("Young cards: <!--correct answers-->") + " <b>" + fmtPerc(stats['gYoungYes%']) + (
"</b> " + _("(%(partOf)d of %(totalSum)d)") % {
'partOf' : stats['gYoungYes'],
'totalSum' : stats['gYoungTotal'] } + "<br>")
html += _("First-seen cards:") + " <b>" + fmtPerc(stats['gNewYes%']) + (
"</b> " + _("(%(partOf)d of %(totalSum)d)") % {
'partOf' : stats['gNewYes'],
'totalSum' : stats['gNewTotal'] } + "<br><br>")
# average pending time
existing = d.cardCount - d.newCountToday
def tr(a, b):
return "<tr><td>%s</td><td align=right>%s</td></tr>" % (a, b)
def repsPerDay(reps,days):
retval = ("<b>%d</b> " % reps) + ngettext("rep", "reps", reps)
retval += ("/<b>%d</b> " % days) + ngettext("day", "days", days)
return retval
if existing and avgInt:
html += "<b>" + _("Recent Work") + "</b>"
if sys.platform.startswith("darwin"):
html += "<table width=250>"
else:
html += "<table width=200>"
html += tr(_("In last week"), repsPerDay(
self.getRepsDone(-7, 0),
self.getDaysReviewed(-7, 0)))
html += tr(_("In last month"), repsPerDay(
self.getRepsDone(-30, 0),
self.getDaysReviewed(-30, 0)))
html += tr(_("In last 3 months"), repsPerDay(
self.getRepsDone(-92, 0),
self.getDaysReviewed(-92, 0)))
html += tr(_("In last 6 months"), repsPerDay(
self.getRepsDone(-182, 0),
self.getDaysReviewed(-182, 0)))
html += tr(_("In last year"), repsPerDay(
self.getRepsDone(-365, 0),
self.getDaysReviewed(-365, 0)))
html += tr(_("Deck life"), repsPerDay(
self.getRepsDone(-13000, 0),
self.getDaysReviewed(-13000, 0)))
html += "</table>"
html += "<br><br><b>" + _("Average Daily Reviews") + "</b>"
if sys.platform.startswith("darwin"):
html += "<table width=250>"
else:
html += "<table width=200>"
html += tr(_("Deck life"), ("<b>%s</b> ") % (
fmtFloat(self.getSumInverseRoundInterval())) + _("cards/day"))
html += tr(_("In next week"), ("<b>%s</b> ") % (
fmtFloat(self.getWorkloadPeriod(7))) + _("cards/day"))
html += tr(_("In next month"), ("<b>%s</b> ") % (
fmtFloat(self.getWorkloadPeriod(30))) + _("cards/day"))
html += tr(_("In last week"), ("<b>%s</b> ") % (
fmtFloat(self.getPastWorkloadPeriod(7))) + _("cards/day"))
html += tr(_("In last month"), ("<b>%s</b> ") % (
fmtFloat(self.getPastWorkloadPeriod(30))) + _("cards/day"))
html += tr(_("In last 3 months"), ("<b>%s</b> ") % (
fmtFloat(self.getPastWorkloadPeriod(92))) + _("cards/day"))
html += tr(_("In last 6 months"), ("<b>%s</b> ") % (
fmtFloat(self.getPastWorkloadPeriod(182))) + _("cards/day"))
html += tr(_("In last year"), ("<b>%s</b> ") % (
fmtFloat(self.getPastWorkloadPeriod(365))) + _("cards/day"))
html += "</table>"
html += "<br><br><b>" + _("Average Added") + "</b>"
if sys.platform.startswith("darwin"):
html += "<table width=250>"
else:
html += "<table width=200>"
html += tr(_("Deck life"), _("<b>%(a)s</b>/day, <b>%(b)s</b>/mon") % {
'a': fmtFloat(self.newAverage()), 'b': fmtFloat(self.newAverage()*30)})
np = self.getNewPeriod(7)
html += tr(_("In last week"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(7))}))
np = self.getNewPeriod(30)
html += tr(_("In last month"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(30))}))
np = self.getNewPeriod(92)
html += tr(_("In last 3 months"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(92))}))
np = self.getNewPeriod(182)
html += tr(_("In last 6 months"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(182))}))
np = self.getNewPeriod(365)
html += tr(_("In last year"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(365))}))
html += "</table>"
html += "<br><br><b>" + _("Average New Seen") + "</b>"
if sys.platform.startswith("darwin"):
html += "<table width=250>"
else:
html += "<table width=200>"
np = self.getFirstPeriod(7)
html += tr(_("In last week"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(7))}))
np = self.getFirstPeriod(30)
html += tr(_("In last month"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(30))}))
np = self.getFirstPeriod(92)
html += tr(_("In last 3 months"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(92))}))
np = self.getFirstPeriod(182)
html += tr(_("In last 6 months"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(182))}))
np = self.getFirstPeriod(365)
html += tr(_("In last year"), _("<b>%(a)d</b> (<b>%(b)s</b>/day)") % (
{'a': np, 'b': fmtFloat(np / float(365))}))
html += "</table>"
html += "<br><br><b>" + _("Card Ease") + "</b><br>"
html += _("Lowest factor: %.2f") % d.s.scalar(
"select min(factor) from cards") + "<br>"
html += _("Average factor: %.2f") % d.s.scalar(
"select avg(factor) from cards") + "<br>"
html += _("Highest factor: %.2f") % d.s.scalar(
"select max(factor) from cards") + "<br>"
html = runFilter("deckStats", html)
return html
def getDaysReviewed(self, start, finish):
now = datetime.datetime.today()
x = now + datetime.timedelta(start)
y = now + datetime.timedelta(finish)
return self.deck.s.scalar(
"select count() from stats where "
"day >= :x and day <= :y and reps > 0",
x=x, y=y)
def getRepsDone(self, start, finish):
now = datetime.datetime.today()
x = time.mktime((now + datetime.timedelta(start)).timetuple())
y = time.mktime((now + datetime.timedelta(finish)).timetuple())
return self.deck.s.scalar(
"select count() from reviewHistory where time >= :x and time <= :y",
x=x, y=y)
def getAverageInterval(self):
return self.deck.s.scalar(
"select sum(interval) / count(interval) from cards "
"where cards.reps > 0") or 0
def intervalReport(self, intervals, labels, total):
boxes = self.splitIntoIntervals(intervals)
keys = boxes.keys()
keys.sort()
html = ""
for key in keys:
html += ("<tr><td align=right>%s</td><td align=right>" +
"%d</td><td align=right>%s</td></tr>") % (
labels[key],
boxes[key],
fmtPerc(boxes[key] / float(total) * 100))
return html
def splitIntoIntervals(self, intervals):
boxes = {}
n = 0
for i in range(len(intervals) - 1):
            (lower, upper) = (intervals[i], intervals[i+1])
            for c in self.deck:
                if c.interval > lower and c.interval <= upper:
boxes[n] = boxes.get(n, 0) + 1
n += 1
return boxes
def newAverage(self):
"Average number of new cards added each day."
return self.deck.cardCount / max(1, self.ageInDays())
def createdTimeStr(self):
return oldanki.utils.fmtTimeSpan(time.time() - self.deck.created)
def ageInDays(self):
return (time.time() - self.deck.created) / 86400.0
def getSumInverseRoundInterval(self):
return self.deck.s.scalar(
"select sum(1/round(max(interval, 1)+0.5)) from cards "
"where cards.reps > 0 "
"and priority > 0") or 0
def getWorkloadPeriod(self, period):
cutoff = time.time() + 86400 * period
return (self.deck.s.scalar("""
select count(id) from cards
where combinedDue < :cutoff
and priority > 0 and relativeDelay in (0,1)""", cutoff=cutoff) or 0) / float(period)
def getPastWorkloadPeriod(self, period):
cutoff = time.time() - 86400 * period
return (self.deck.s.scalar("""
select count(*) from reviewHistory
where time > :cutoff""", cutoff=cutoff) or 0) / float(period)
def getNewPeriod(self, period):
cutoff = time.time() - 86400 * period
return (self.deck.s.scalar("""
select count(id) from cards
where created > :cutoff""", cutoff=cutoff) or 0)
def getFirstPeriod(self, period):
cutoff = time.time() - 86400 * period
return (self.deck.s.scalar("""
select count(*) from reviewHistory
where reps = 1 and time > :cutoff""", cutoff=cutoff) or 0)
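    # Note (illustrative, not in the original source): the period helpers
    # above all reduce to "matching rows / window length in days", e.g.
    #
    #   self.getPastWorkloadPeriod(7)   # reviews in last 7*86400s, over 7.0
    #
    # which yields the "cards/day" figures rendered by the report above.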
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/stats.py
|
stats.py
|
__docformat__ = 'restructuredtext'
import time
from oldanki.db import *
reviewHistoryTable = Table(
'reviewHistory', metadata,
Column('cardId', Integer, nullable=False),
Column('time', Float, nullable=False, default=time.time),
Column('lastInterval', Float, nullable=False),
Column('nextInterval', Float, nullable=False),
Column('ease', Integer, nullable=False),
Column('delay', Float, nullable=False),
Column('lastFactor', Float, nullable=False),
Column('nextFactor', Float, nullable=False),
Column('reps', Float, nullable=False),
Column('thinkingTime', Float, nullable=False),
Column('yesCount', Float, nullable=False),
Column('noCount', Float, nullable=False),
PrimaryKeyConstraint("cardId", "time"))
class CardHistoryEntry(object):
"Create after rescheduling card."
def __init__(self, card=None, ease=None, delay=None):
if not card:
return
self.cardId = card.id
self.lastInterval = card.lastInterval
self.nextInterval = card.interval
self.lastFactor = card.lastFactor
self.nextFactor = card.factor
self.reps = card.reps
self.yesCount = card.yesCount
self.noCount = card.noCount
self.ease = ease
self.delay = delay
self.thinkingTime = card.thinkingTime()
def writeSQL(self, s):
s.statement("""
insert into reviewHistory
(cardId, lastInterval, nextInterval, ease, delay, lastFactor,
nextFactor, reps, thinkingTime, yesCount, noCount, time)
values (
:cardId, :lastInterval, :nextInterval, :ease, :delay,
:lastFactor, :nextFactor, :reps, :thinkingTime, :yesCount, :noCount,
:time)""",
cardId=self.cardId,
lastInterval=self.lastInterval,
nextInterval=self.nextInterval,
ease=self.ease,
delay=self.delay,
lastFactor=self.lastFactor,
nextFactor=self.nextFactor,
reps=self.reps,
thinkingTime=self.thinkingTime,
yesCount=self.yesCount,
noCount=self.noCount,
time=time.time())
mapper(CardHistoryEntry, reviewHistoryTable)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/history.py
|
history.py
|
__docformat__ = 'restructuredtext'
import zlib, re, urllib, urllib2, socket, time, shutil
from anki.utils import json as simplejson
import os, base64, httplib, sys, tempfile, types
from datetime import date
import oldanki, oldanki.deck, oldanki.cards
from oldanki.db import sqlite
from oldanki.errors import *
from oldanki.models import Model, FieldModel, CardModel
from oldanki.facts import Fact, Field
from oldanki.cards import Card
from oldanki.stats import Stats, globalStats
from oldanki.history import CardHistoryEntry
from oldanki.stats import globalStats
from oldanki.utils import ids2str, hexifyID, checksum
from oldanki.media import mediaFiles
from oldanki.lang import _
from hooks import runHook
if simplejson.__version__ < "1.7.3":
raise Exception("SimpleJSON must be 1.7.3 or later.")
CHUNK_SIZE = 32768
MIME_BOUNDARY = "Anki-sync-boundary"
# live
SYNC_URL = "http://ankiweb.net/sync/"
SYNC_HOST = "ankiweb.net"; SYNC_PORT = 80
# testing
#SYNC_URL = "http://localhost:8001/sync/"
#SYNC_HOST = "localhost"; SYNC_PORT = 8001
KEYS = ("models", "facts", "cards", "media")
##########################################################################
# Monkey-patch httplib to incrementally send instead of chewing up large
# amounts of memory, and track progress.
sendProgressHook = None
def incrementalSend(self, strOrFile):
if self.sock is None:
if self.auto_open:
self.connect()
else:
raise NotConnected()
if self.debuglevel > 0:
print "send:", repr(str)
try:
        if isinstance(strOrFile, basestring):
self.sock.sendall(strOrFile)
else:
cnt = 0
t = time.time()
while 1:
if sendProgressHook and time.time() - t > 1:
sendProgressHook(cnt)
t = time.time()
data = strOrFile.read(CHUNK_SIZE)
cnt += len(data)
if not data:
break
self.sock.sendall(data)
except socket.error, v:
if v[0] == 32: # Broken pipe
self.close()
raise
httplib.HTTPConnection.send = incrementalSend
def fullSyncProgressHook(cnt):
runHook("fullSyncProgress", "fromLocal", cnt)
##########################################################################
class SyncTools(object):
def __init__(self, deck=None):
self.deck = deck
self.diffs = {}
self.serverExcludedTags = []
self.timediff = 0
# Control
##########################################################################
def setServer(self, server):
self.server = server
def sync(self):
"Sync two decks locally. Reimplement this for finer control."
if not self.prepareSync(0):
return
sums = self.summaries()
payload = self.genPayload(sums)
res = self.server.applyPayload(payload)
self.applyPayloadReply(res)
self.deck.reset()
def prepareSync(self, timediff):
"Sync setup. True if sync needed."
self.localTime = self.modified()
self.remoteTime = self.server.modified()
if self.localTime == self.remoteTime:
return False
l = self._lastSync(); r = self.server._lastSync()
# set lastSync to the lower of the two sides, and account for slow
# clocks & assume it took up to 10 seconds for the reply to arrive
self.deck.lastSync = min(l, r) - timediff - 10
return True
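    # Worked example (invented numbers): with a local lastSync of 1000, a
    # remote lastSync of 1020 and a measured clock difference of 5 seconds,
    #
    #   min(1000, 1020) - 5 - 10  ->  985
    #
    # so anything modified after t=985 on either side is picked up later by
    # summary().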
def summaries(self):
return (self.summary(self.deck.lastSync),
self.server.summary(self.deck.lastSync))
def genPayload(self, summaries):
(lsum, rsum) = summaries
self.preSyncRefresh()
payload = {}
# first, handle models, facts and cards
for key in KEYS:
diff = self.diffSummary(lsum, rsum, key)
payload["added-" + key] = self.getObjsFromKey(diff[0], key)
payload["deleted-" + key] = diff[1]
payload["missing-" + key] = diff[2]
self.deleteObjsFromKey(diff[3], key)
# handle the remainder
if self.localTime > self.remoteTime:
payload['stats'] = self.bundleStats()
payload['history'] = self.bundleHistory()
payload['sources'] = self.bundleSources()
# finally, set new lastSync and bundle the deck info
payload['deck'] = self.bundleDeck()
return payload
def applyPayload(self, payload):
reply = {}
self.preSyncRefresh()
# model, facts and cards
for key in KEYS:
k = 'added-' + key
# send back any requested
if k in payload:
reply[k] = self.getObjsFromKey(
payload['missing-' + key], key)
self.updateObjsFromKey(payload['added-' + key], key)
self.deleteObjsFromKey(payload['deleted-' + key], key)
# send back deck-related stuff if it wasn't sent to us
        if 'deck' not in payload:
reply['stats'] = self.bundleStats()
reply['history'] = self.bundleHistory()
reply['sources'] = self.bundleSources()
# finally, set new lastSync and bundle the deck info
reply['deck'] = self.bundleDeck()
else:
self.updateDeck(payload['deck'])
self.updateStats(payload['stats'])
self.updateHistory(payload['history'])
if 'sources' in payload:
self.updateSources(payload['sources'])
self.postSyncRefresh()
cardIds = [x[0] for x in payload['added-cards']]
self.deck.updateCardTags(cardIds)
# rebuild priorities on server
self.rebuildPriorities(cardIds, self.serverExcludedTags)
return reply
def applyPayloadReply(self, reply):
# model, facts and cards
for key in KEYS:
k = 'added-' + key
# old version may not send media
if k in reply:
self.updateObjsFromKey(reply['added-' + key], key)
# deck
if 'deck' in reply:
self.updateDeck(reply['deck'])
self.updateStats(reply['stats'])
self.updateHistory(reply['history'])
if 'sources' in reply:
self.updateSources(reply['sources'])
self.postSyncRefresh()
# rebuild priorities on client
cardIds = [x[0] for x in reply['added-cards']]
self.deck.updateCardTags(cardIds)
self.rebuildPriorities(cardIds)
if self.missingFacts() != 0:
raise Exception(
"Facts missing after sync. Please run Tools>Advanced>Check DB.")
def missingFacts(self):
return self.deck.s.scalar(
"select count() from cards where factId "+
"not in (select id from facts)");
def rebuildPriorities(self, cardIds, suspend=[]):
self.deck.updateAllPriorities(partial=True, dirty=False)
self.deck.updatePriorities(cardIds, suspend=suspend, dirty=False)
def postSyncRefresh(self):
"Flush changes to DB, and reload object associations."
self.deck.s.flush()
self.deck.s.refresh(self.deck)
self.deck.currentModel
def preSyncRefresh(self):
# ensure global stats are available (queue may not be built)
self.deck._globalStats = globalStats(self.deck)
def payloadChanges(self, payload):
h = {
'lf': len(payload['added-facts']['facts']),
'rf': len(payload['missing-facts']),
'lc': len(payload['added-cards']),
'rc': len(payload['missing-cards']),
'lm': len(payload['added-models']),
'rm': len(payload['missing-models']),
}
if self.localTime > self.remoteTime:
h['ls'] = _('all')
h['rs'] = 0
else:
h['ls'] = 0
h['rs'] = _('all')
return h
def payloadChangeReport(self, payload):
p = self.payloadChanges(payload)
return _("""\
<table>
<tr><td><b>Added/Changed </b></td>
<td><b>Here </b></td><td><b>Server</b></td></tr>
<tr><td>Cards</td><td>%(lc)d</td><td>%(rc)d</td></tr>
<tr><td>Facts</td><td>%(lf)d</td><td>%(rf)d</td></tr>
<tr><td>Models</td><td>%(lm)d</td><td>%(rm)d</td></tr>
<tr><td>Stats</td><td>%(ls)s</td><td>%(rs)s</td></tr>
</table>""") % p
# Summaries
##########################################################################
def summary(self, lastSync):
"Generate a full summary of modtimes for two-way syncing."
# client may have selected an earlier sync time
self.deck.lastSync = lastSync
# ensure we're flushed first
self.deck.s.flush()
return {
# cards
"cards": self.realLists(self.deck.s.all(
"select id, modified from cards where modified > :mod",
mod=lastSync)),
"delcards": self.realLists(self.deck.s.all(
"select cardId, deletedTime from cardsDeleted "
"where deletedTime > :mod", mod=lastSync)),
# facts
"facts": self.realLists(self.deck.s.all(
"select id, modified from facts where modified > :mod",
mod=lastSync)),
"delfacts": self.realLists(self.deck.s.all(
"select factId, deletedTime from factsDeleted "
"where deletedTime > :mod", mod=lastSync)),
# models
"models": self.realLists(self.deck.s.all(
"select id, modified from models where modified > :mod",
mod=lastSync)),
"delmodels": self.realLists(self.deck.s.all(
"select modelId, deletedTime from modelsDeleted "
"where deletedTime > :mod", mod=lastSync)),
# media
"media": self.realLists(self.deck.s.all(
"select id, created from media where created > :mod",
mod=lastSync)),
"delmedia": self.realLists(self.deck.s.all(
"select mediaId, deletedTime from mediaDeleted "
"where deletedTime > :mod", mod=lastSync)),
}
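    # Shape of the returned summary (illustrative values): each plain key
    # maps to [id, modified] pairs changed since lastSync, and each "del*"
    # key to [id, deletedTime] pairs:
    #
    #   {"cards":    [[4950128361, 1300000123.4]],
    #    "delcards": [[4950128362, 1300000200.0]],
    #    "facts": [], "delfacts": [],
    #    "models": [], "delmodels": [],
    #    "media": [], "delmedia": []}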
# Diffing
##########################################################################
def diffSummary(self, localSummary, remoteSummary, key):
# list of ids on both ends
lexists = localSummary[key]
ldeleted = localSummary["del"+key]
rexists = remoteSummary[key]
rdeleted = remoteSummary["del"+key]
ldeletedIds = dict(ldeleted)
rdeletedIds = dict(rdeleted)
# to store the results
locallyEdited = []
locallyDeleted = []
remotelyEdited = []
remotelyDeleted = []
# build a hash of all ids, with value (localMod, remoteMod).
# deleted/nonexisting cards are marked with a modtime of None.
ids = {}
for (id, mod) in rexists:
ids[id] = [None, mod]
for (id, mod) in rdeleted:
ids[id] = [None, None]
for (id, mod) in lexists:
if id in ids:
ids[id][0] = mod
else:
ids[id] = [mod, None]
for (id, mod) in ldeleted:
if id in ids:
ids[id][0] = None
else:
ids[id] = [None, None]
# loop through the hash, determining differences
for (id, (localMod, remoteMod)) in ids.items():
if localMod and remoteMod:
# changed/existing on both sides
if localMod < remoteMod:
remotelyEdited.append(id)
elif localMod > remoteMod:
locallyEdited.append(id)
elif localMod and not remoteMod:
# if it's missing on server or newer here, sync
if (id not in rdeletedIds or
rdeletedIds[id] < localMod):
locallyEdited.append(id)
else:
remotelyDeleted.append(id)
elif remoteMod and not localMod:
# if it's missing locally or newer there, sync
if (id not in ldeletedIds or
ldeletedIds[id] < remoteMod):
remotelyEdited.append(id)
else:
locallyDeleted.append(id)
else:
if id in ldeletedIds and id not in rdeletedIds:
locallyDeleted.append(id)
elif id in rdeletedIds and id not in ldeletedIds:
remotelyDeleted.append(id)
return (locallyEdited, locallyDeleted,
remotelyEdited, remotelyDeleted)
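    # Worked example (hypothetical summaries, "cards" key only): one id
    # edited on both sides, one deleted remotely after a local edit:
    #
    #   local  = {"cards": [[1, 200], [2, 150]], "delcards": []}
    #   remote = {"cards": [[1, 100]], "delcards": [[2, 180]]}
    #   SyncTools().diffSummary(local, remote, "cards")
    #   # -> ([1], [], [], [2])
    #   # id 1: localMod 200 > remoteMod 100           -> locally edited
    #   # id 2: remote deletion at 180 > localMod 150  -> remotely deleted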
# Models
##########################################################################
def getModels(self, ids, updateModified=False):
return [self.bundleModel(id, updateModified) for id in ids]
def bundleModel(self, id, updateModified):
"Return a model representation suitable for transport."
mod = self.deck.s.query(Model).get(id)
# force load of lazy attributes
mod.fieldModels; mod.cardModels
m = self.dictFromObj(mod)
m['fieldModels'] = [self.bundleFieldModel(fm) for fm in m['fieldModels']]
        m['cardModels'] = [self.bundleCardModel(cm) for cm in m['cardModels']]
if updateModified:
m['modified'] = time.time()
return m
def bundleFieldModel(self, fm):
d = self.dictFromObj(fm)
if 'model' in d: del d['model']
return d
def bundleCardModel(self, cm):
d = self.dictFromObj(cm)
if 'model' in d: del d['model']
return d
def updateModels(self, models):
for model in models:
local = self.getModel(model['id'])
# avoid overwriting any existing card/field models
fms = model['fieldModels']; del model['fieldModels']
cms = model['cardModels']; del model['cardModels']
self.applyDict(local, model)
self.mergeFieldModels(local, fms)
self.mergeCardModels(local, cms)
self.deck.s.statement(
"delete from modelsDeleted where modelId in %s" %
ids2str([m['id'] for m in models]))
def getModel(self, id, create=True):
"Return a local model with same ID, or create."
id = int(id)
for l in self.deck.models:
if l.id == id:
return l
if not create:
return
m = Model()
self.deck.models.append(m)
return m
def mergeFieldModels(self, model, fms):
ids = []
for fm in fms:
local = self.getFieldModel(model, fm)
self.applyDict(local, fm)
ids.append(fm['id'])
for fm in model.fieldModels:
if fm.id not in ids:
self.deck.deleteFieldModel(model, fm)
def getFieldModel(self, model, remote):
id = int(remote['id'])
for fm in model.fieldModels:
if fm.id == id:
return fm
fm = FieldModel()
model.addFieldModel(fm)
return fm
def mergeCardModels(self, model, cms):
ids = []
for cm in cms:
local = self.getCardModel(model, cm)
            if 'allowEmptyAnswer' not in cm or cm['allowEmptyAnswer'] is None:
cm['allowEmptyAnswer'] = True
self.applyDict(local, cm)
ids.append(cm['id'])
for cm in model.cardModels:
if cm.id not in ids:
self.deck.deleteCardModel(model, cm)
def getCardModel(self, model, remote):
id = int(remote['id'])
for cm in model.cardModels:
if cm.id == id:
return cm
cm = CardModel()
model.addCardModel(cm)
return cm
def deleteModels(self, ids):
for id in ids:
model = self.getModel(id, create=False)
if model:
self.deck.deleteModel(model)
# Facts
##########################################################################
def getFacts(self, ids, updateModified=False):
if updateModified:
modified = time.time()
else:
modified = "modified"
factIds = ids2str(ids)
return {
'facts': self.realLists(self.deck.s.all("""
select id, modelId, created, %s, tags, spaceUntil, lastCardId from facts
where id in %s""" % (modified, factIds))),
'fields': self.realLists(self.deck.s.all("""
select id, factId, fieldModelId, ordinal, value from fields
where factId in %s""" % factIds))
}
def updateFacts(self, factsdict):
facts = factsdict['facts']
fields = factsdict['fields']
if not facts:
return
# update facts first
dlist = [{
'id': f[0],
'modelId': f[1],
'created': f[2],
'modified': f[3],
'tags': f[4],
'spaceUntil': f[5] or "",
'lastCardId': f[6]
} for f in facts]
self.deck.s.execute("""
insert or replace into facts
(id, modelId, created, modified, tags, spaceUntil, lastCardId)
values
(:id, :modelId, :created, :modified, :tags, :spaceUntil, :lastCardId)""", dlist)
# now fields
dlist = [{
'id': f[0],
'factId': f[1],
'fieldModelId': f[2],
'ordinal': f[3],
'value': f[4]
} for f in fields]
# delete local fields since ids may have changed
self.deck.s.execute(
"delete from fields where factId in %s" %
ids2str([f[0] for f in facts]))
# then update
self.deck.s.execute("""
insert into fields
(id, factId, fieldModelId, ordinal, value)
values
(:id, :factId, :fieldModelId, :ordinal, :value)""", dlist)
self.deck.s.statement(
"delete from factsDeleted where factId in %s" %
ids2str([f[0] for f in facts]))
def deleteFacts(self, ids):
self.deck.deleteFacts(ids)
# Cards
##########################################################################
def getCards(self, ids):
return self.realLists(self.deck.s.all("""
select id, factId, cardModelId, created, modified, tags, ordinal,
priority, interval, lastInterval, due, lastDue, factor,
firstAnswered, reps, successive, averageTime, reviewTime, youngEase0,
youngEase1, youngEase2, youngEase3, youngEase4, matureEase0,
matureEase1, matureEase2, matureEase3, matureEase4, yesCount, noCount,
question, answer, lastFactor, spaceUntil, type, combinedDue, relativeDelay
from cards where id in %s""" % ids2str(ids)))
def updateCards(self, cards):
if not cards:
return
# FIXME: older clients won't send this, so this is temp compat code
def getType(row):
if len(row) > 36:
return row[36]
if row[15]:
return 1
elif row[14]:
return 0
return 2
dlist = [{'id': c[0],
'factId': c[1],
'cardModelId': c[2],
'created': c[3],
'modified': c[4],
'tags': c[5],
'ordinal': c[6],
'priority': c[7],
'interval': c[8],
'lastInterval': c[9],
'due': c[10],
'lastDue': c[11],
'factor': c[12],
'firstAnswered': c[13],
'reps': c[14],
'successive': c[15],
'averageTime': c[16],
'reviewTime': c[17],
'youngEase0': c[18],
'youngEase1': c[19],
'youngEase2': c[20],
'youngEase3': c[21],
'youngEase4': c[22],
'matureEase0': c[23],
'matureEase1': c[24],
'matureEase2': c[25],
'matureEase3': c[26],
'matureEase4': c[27],
'yesCount': c[28],
'noCount': c[29],
'question': c[30],
'answer': c[31],
'lastFactor': c[32],
'spaceUntil': c[33],
'type': c[34],
'combinedDue': c[35],
'rd': getType(c)
} for c in cards]
self.deck.s.execute("""
insert or replace into cards
(id, factId, cardModelId, created, modified, tags, ordinal,
priority, interval, lastInterval, due, lastDue, factor,
firstAnswered, reps, successive, averageTime, reviewTime, youngEase0,
youngEase1, youngEase2, youngEase3, youngEase4, matureEase0,
matureEase1, matureEase2, matureEase3, matureEase4, yesCount, noCount,
question, answer, lastFactor, spaceUntil, type, combinedDue,
relativeDelay, isDue)
values
(:id, :factId, :cardModelId, :created, :modified, :tags, :ordinal,
:priority, :interval, :lastInterval, :due, :lastDue, :factor,
:firstAnswered, :reps, :successive, :averageTime, :reviewTime, :youngEase0,
:youngEase1, :youngEase2, :youngEase3, :youngEase4, :matureEase0,
:matureEase1, :matureEase2, :matureEase3, :matureEase4, :yesCount,
:noCount, :question, :answer, :lastFactor, :spaceUntil,
:type, :combinedDue, :rd, 0)""", dlist)
self.deck.s.statement(
"delete from cardsDeleted where cardId in %s" %
ids2str([c[0] for c in cards]))
def deleteCards(self, ids):
self.deck.deleteCards(ids)
# Deck/stats/history
##########################################################################
def bundleDeck(self):
# ensure modified is not greater than server time
if getattr(self, "server", None) and getattr(
self.server, "timestamp", None):
self.deck.modified = min(self.deck.modified,self.server.timestamp)
# and ensure lastSync is greater than modified
self.deck.lastSync = max(time.time(), self.deck.modified+1)
d = self.dictFromObj(self.deck)
del d['Session']
del d['engine']
del d['s']
del d['path']
del d['syncName']
del d['version']
if 'newQueue' in d:
del d['newQueue']
del d['failedQueue']
del d['revQueue']
# these may be deleted before bundling
if 'css' in d: del d['css']
if 'models' in d: del d['models']
if 'currentModel' in d: del d['currentModel']
keys = d.keys()
for k in keys:
if isinstance(d[k], types.MethodType):
del d[k]
d['meta'] = self.realLists(self.deck.s.all("select * from deckVars"))
return d
def updateDeck(self, deck):
if 'meta' in deck:
meta = deck['meta']
for (k,v) in meta:
self.deck.s.statement("""
insert or replace into deckVars
(key, value) values (:k, :v)""", k=k, v=v)
del deck['meta']
self.applyDict(self.deck, deck)
def bundleStats(self):
def bundleStat(stat):
s = self.dictFromObj(stat)
s['day'] = s['day'].toordinal()
del s['id']
return s
lastDay = date.fromtimestamp(max(0, self.deck.lastSync - 60*60*24))
ids = self.deck.s.column0(
"select id from stats where type = 1 and day >= :day", day=lastDay)
stat = Stats()
def statFromId(id):
stat.fromDB(self.deck.s, id)
return stat
stats = {
'global': bundleStat(self.deck._globalStats),
'daily': [bundleStat(statFromId(id)) for id in ids],
}
return stats
def updateStats(self, stats):
stats['global']['day'] = date.fromordinal(stats['global']['day'])
self.applyDict(self.deck._globalStats, stats['global'])
self.deck._globalStats.toDB(self.deck.s)
for record in stats['daily']:
record['day'] = date.fromordinal(record['day'])
stat = Stats()
id = self.deck.s.scalar("select id from stats where "
"type = :type and day = :day",
type=1, day=record['day'])
if id:
stat.fromDB(self.deck.s, id)
else:
stat.create(self.deck.s, 1, record['day'])
self.applyDict(stat, record)
stat.toDB(self.deck.s)
def bundleHistory(self):
return self.realLists(self.deck.s.all("""
select cardId, time, lastInterval, nextInterval, ease, delay,
lastFactor, nextFactor, reps, thinkingTime, yesCount, noCount
from reviewHistory where time > :ls""",
ls=self.deck.lastSync))
def updateHistory(self, history):
dlist = [{'cardId': h[0],
'time': h[1],
'lastInterval': h[2],
'nextInterval': h[3],
'ease': h[4],
'delay': h[5],
'lastFactor': h[6],
'nextFactor': h[7],
'reps': h[8],
'thinkingTime': h[9],
'yesCount': h[10],
'noCount': h[11]} for h in history]
if not dlist:
return
self.deck.s.statements("""
insert or ignore into reviewHistory
(cardId, time, lastInterval, nextInterval, ease, delay,
lastFactor, nextFactor, reps, thinkingTime, yesCount, noCount)
values
(:cardId, :time, :lastInterval, :nextInterval, :ease, :delay,
:lastFactor, :nextFactor, :reps, :thinkingTime, :yesCount, :noCount)""",
dlist)
def bundleSources(self):
return self.realLists(self.deck.s.all("select * from sources"))
def updateSources(self, sources):
for s in sources:
self.deck.s.statement("""
insert or replace into sources values
(:id, :name, :created, :lastSync, :syncPeriod)""",
id=s[0],
name=s[1],
created=s[2],
lastSync=s[3],
syncPeriod=s[4])
# Media metadata
##########################################################################
def getMedia(self, ids):
return [tuple(row) for row in self.deck.s.all("""
select id, filename, size, created, originalPath, description
from media where id in %s""" % ids2str(ids))]
def updateMedia(self, media):
meta = []
for m in media:
# build meta
meta.append({
'id': m[0],
'filename': m[1],
'size': m[2],
'created': m[3],
'originalPath': m[4],
'description': m[5]})
# apply metadata
if meta:
self.deck.s.statements("""
insert or replace into media (id, filename, size, created,
originalPath, description)
values (:id, :filename, :size, :created, :originalPath,
:description)""", meta)
self.deck.s.statement(
"delete from mediaDeleted where mediaId in %s" %
ids2str([m[0] for m in media]))
def deleteMedia(self, ids):
sids = ids2str(ids)
files = self.deck.s.column0(
"select filename from media where id in %s" % sids)
self.deck.s.statement("""
insert into mediaDeleted
select id, :now from media
where media.id in %s""" % sids, now=time.time())
self.deck.s.execute(
"delete from media where id in %s" % sids)
# One-way syncing (sharing)
##########################################################################
def syncOneWay(self, lastSync):
"Sync two decks one way."
payload = self.server.genOneWayPayload(lastSync)
self.applyOneWayPayload(payload)
self.deck.reset()
def syncOneWayDeckName(self):
return (self.deck.s.scalar("select name from sources where id = :id",
id=self.server.deckName) or
hexifyID(int(self.server.deckName)))
def prepareOneWaySync(self):
"Sync setup. True if sync needed. Not used for local sync."
srcID = self.server.deckName
(lastSync, syncPeriod) = self.deck.s.first(
"select lastSync, syncPeriod from sources where id = :id", id=srcID)
if self.server.modified() <= lastSync:
return
self.deck.lastSync = lastSync
return True
def genOneWayPayload(self, lastSync):
"Bundle all added or changed objects since the last sync."
p = {}
# facts
factIds = self.deck.s.column0(
"select id from facts where modified > :l", l=lastSync)
p['facts'] = self.getFacts(factIds, updateModified=True)
# models
modelIds = self.deck.s.column0(
"select id from models where modified > :l", l=lastSync)
p['models'] = self.getModels(modelIds, updateModified=True)
# media
mediaIds = self.deck.s.column0(
"select id from media where created > :l", l=lastSync)
p['media'] = self.getMedia(mediaIds)
# cards
cardIds = self.deck.s.column0(
"select id from cards where modified > :l", l=lastSync)
p['cards'] = self.realLists(self.getOneWayCards(cardIds))
return p
def applyOneWayPayload(self, payload):
keys = [k for k in KEYS if k != "cards"]
# model, facts, media
for key in keys:
self.updateObjsFromKey(payload[key], key)
# models need their source tagged
for m in payload["models"]:
self.deck.s.statement("update models set source = :s "
"where id = :id",
s=self.server.deckName,
id=m['id'])
# cards last, handled differently
t = time.time()
try:
self.updateOneWayCards(payload['cards'])
except KeyError:
sys.stderr.write("Subscribed to a broken deck. "
"Try removing your deck subscriptions.")
t = 0
# update sync time
self.deck.s.statement(
"update sources set lastSync = :t where id = :id",
id=self.server.deckName, t=t)
self.deck.modified = time.time()
def getOneWayCards(self, ids):
"The minimum information necessary to generate one way cards."
return self.deck.s.all(
"select id, factId, cardModelId, ordinal, created from cards "
"where id in %s" % ids2str(ids))
def updateOneWayCards(self, cards):
if not cards:
return
t = time.time()
dlist = [{'id': c[0], 'factId': c[1], 'cardModelId': c[2],
'ordinal': c[3], 'created': c[4], 't': t} for c in cards]
# add any missing cards
self.deck.s.statements("""
insert or ignore into cards
(id, factId, cardModelId, created, modified, tags, ordinal,
priority, interval, lastInterval, due, lastDue, factor,
firstAnswered, reps, successive, averageTime, reviewTime, youngEase0,
youngEase1, youngEase2, youngEase3, youngEase4, matureEase0,
matureEase1, matureEase2, matureEase3, matureEase4, yesCount, noCount,
question, answer, lastFactor, spaceUntil, isDue, type, combinedDue,
relativeDelay)
values
(:id, :factId, :cardModelId, :created, :t, "", :ordinal,
1, 0, 0, :created, 0, 2.5,
0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0,
0, 0, 0, 0, 0,
0, "", "", 2.5, 0, 0, 2, :t, 2)""", dlist)
# update q/as
models = dict(self.deck.s.all("""
select cards.id, models.id
from cards, facts, models
where cards.factId = facts.id
and facts.modelId = models.id
and cards.id in %s""" % ids2str([c[0] for c in cards])))
self.deck.s.flush()
self.deck.updateCardQACache(
[(c[0], c[2], c[1], models[c[0]]) for c in cards])
# rebuild priorities on client
cardIds = [c[0] for c in cards]
self.deck.updateCardTags(cardIds)
self.rebuildPriorities(cardIds)
# Tools
##########################################################################
def modified(self):
return self.deck.modified
def _lastSync(self):
return self.deck.lastSync
def unstuff(self, data):
"Uncompress and convert to unicode."
return simplejson.loads(unicode(zlib.decompress(data), "utf8"))
def stuff(self, data):
"Convert into UTF-8 and compress."
return zlib.compress(simplejson.dumps(data))
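    # Round-trip sketch: payloads travel as zlib-compressed JSON, so the two
    # helpers above are inverses:
    #
    #   st = SyncTools()
    #   blob = st.stuff({"a": 1})    # compressed '{"a": 1}'
    #   st.unstuff(blob)             # -> {u'a': 1}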
def dictFromObj(self, obj):
"Return a dict representing OBJ without any hidden db fields."
return dict([(k,v) for (k,v) in obj.__dict__.items()
if not k.startswith("_")])
def applyDict(self, obj, dict):
"Apply each element in DICT to OBJ in a way the ORM notices."
for (k,v) in dict.items():
setattr(obj, k, v)
def realLists(self, result):
"Convert an SQLAlchemy response into a list of real lists."
return [list(x) for x in result]
def getObjsFromKey(self, ids, key):
return getattr(self, "get" + key.capitalize())(ids)
def deleteObjsFromKey(self, ids, key):
return getattr(self, "delete" + key.capitalize())(ids)
def updateObjsFromKey(self, ids, key):
return getattr(self, "update" + key.capitalize())(ids)
# Full sync
##########################################################################
def needFullSync(self, sums):
if self.deck.lastSync <= 0:
return True
for sum in sums:
for l in sum.values():
if len(l) > 1000:
return True
if self.deck.s.scalar(
"select count() from reviewHistory where time > :ls",
ls=self.deck.lastSync) > 1000:
return True
lastDay = date.fromtimestamp(max(0, self.deck.lastSync - 60*60*24))
if self.deck.s.scalar(
"select count() from stats where day >= :day",
day=lastDay) > 100:
return True
return False
def prepareFullSync(self):
t = time.time()
# ensure modified is not greater than server time
self.deck.modified = min(self.deck.modified, self.server.timestamp)
self.deck.s.commit()
self.deck.close()
fields = {
"p": self.server.password,
"u": self.server.username,
"d": self.server.deckName.encode("utf-8"),
}
if self.localTime > self.remoteTime:
return ("fromLocal", fields, self.deck.path)
else:
return ("fromServer", fields, self.deck.path)
def fullSync(self):
ret = self.prepareFullSync()
if ret[0] == "fromLocal":
self.fullSyncFromLocal(ret[1], ret[2])
else:
self.fullSyncFromServer(ret[1], ret[2])
def fullSyncFromLocal(self, fields, path):
global sendProgressHook
try:
# write into a temporary file, since POST needs content-length
src = open(path, "rb")
(fd, name) = tempfile.mkstemp(prefix="oldanki")
tmp = open(name, "w+b")
# post vars
for (key, value) in fields.items():
tmp.write('--' + MIME_BOUNDARY + "\r\n")
tmp.write('Content-Disposition: form-data; name="%s"\r\n' % key)
tmp.write('\r\n')
tmp.write(value)
tmp.write('\r\n')
# file header
tmp.write('--' + MIME_BOUNDARY + "\r\n")
tmp.write(
'Content-Disposition: form-data; name="deck"; filename="deck"\r\n')
tmp.write('Content-Type: application/octet-stream\r\n')
tmp.write('\r\n')
# data
comp = zlib.compressobj()
while 1:
data = src.read(CHUNK_SIZE)
if not data:
tmp.write(comp.flush())
break
tmp.write(comp.compress(data))
src.close()
tmp.write('\r\n--' + MIME_BOUNDARY + '--\r\n\r\n')
size = tmp.tell()
tmp.seek(0)
# open http connection
runHook("fullSyncStarted", size)
headers = {
'Content-type': 'multipart/form-data; boundary=%s' %
MIME_BOUNDARY,
'Content-length': str(size),
'Host': SYNC_HOST,
}
req = urllib2.Request(SYNC_URL + "fullup?v=2", tmp, headers)
try:
sendProgressHook = fullSyncProgressHook
res = urllib2.urlopen(req).read()
assert res.startswith("OK")
# update lastSync
c = sqlite.connect(path)
c.execute("update decks set lastSync = ?",
(res[3:],))
c.commit()
c.close()
finally:
sendProgressHook = None
tmp.close()
os.close(fd)
os.unlink(name)
finally:
runHook("fullSyncFinished")
def fullSyncFromServer(self, fields, path):
try:
runHook("fullSyncStarted", 0)
fields = urllib.urlencode(fields)
src = urllib.urlopen(SYNC_URL + "fulldown", fields)
(fd, tmpname) = tempfile.mkstemp(dir=os.path.dirname(path),
prefix="fullsync")
tmp = open(tmpname, "wb")
decomp = zlib.decompressobj()
cnt = 0
while 1:
data = src.read(CHUNK_SIZE)
if not data:
tmp.write(decomp.flush())
break
tmp.write(decomp.decompress(data))
cnt += CHUNK_SIZE
runHook("fullSyncProgress", "fromServer", cnt)
src.close()
tmp.close()
os.close(fd)
# if we were successful, overwrite old deck
os.unlink(path)
os.rename(tmpname, path)
# reset the deck name
c = sqlite.connect(path)
c.execute("update decks set syncName = ?",
[checksum(path.encode("utf-8"))])
c.commit()
c.close()
finally:
runHook("fullSyncFinished")
# Local syncing
##########################################################################
class SyncServer(SyncTools):
def __init__(self, deck=None):
SyncTools.__init__(self, deck)
class SyncClient(SyncTools):
pass
# HTTP proxy: act as a server and direct requests to the real server
##########################################################################
class HttpSyncServerProxy(SyncServer):
def __init__(self, user, passwd):
SyncServer.__init__(self)
self.decks = None
self.deckName = None
self.username = user
self.password = passwd
self.protocolVersion = 5
self.sourcesToCheck = []
def connect(self, clientVersion=""):
"Check auth, protocol & grab deck list."
if not self.decks:
socket.setdefaulttimeout(30)
d = self.runCmd("getDecks",
libanki=oldanki.version,
client=clientVersion,
sources=simplejson.dumps(self.sourcesToCheck),
pversion=self.protocolVersion)
socket.setdefaulttimeout(None)
if d['status'] != "OK":
raise SyncError(type="authFailed", status=d['status'])
self.decks = d['decks']
self.timestamp = d['timestamp']
self.timediff = abs(self.timestamp - time.time())
def hasDeck(self, deckName):
self.connect()
return deckName in self.decks.keys()
def availableDecks(self):
self.connect()
return self.decks.keys()
def createDeck(self, deckName):
ret = self.runCmd("createDeck", name=deckName.encode("utf-8"))
if not ret or ret['status'] != "OK":
raise SyncError(type="createFailed")
self.decks[deckName] = [0, 0]
def summary(self, lastSync):
return self.runCmd("summary",
lastSync=self.stuff(lastSync))
def genOneWayPayload(self, lastSync):
return self.runCmd("genOneWayPayload",
lastSync=self.stuff(lastSync))
def modified(self):
self.connect()
return self.decks[self.deckName][0]
def _lastSync(self):
self.connect()
return self.decks[self.deckName][1]
def applyPayload(self, payload):
return self.runCmd("applyPayload",
payload=self.stuff(payload))
def finish(self):
assert self.runCmd("finish") == "OK"
def runCmd(self, action, **args):
data = {"p": self.password,
"u": self.username,
"v": 2}
if self.deckName:
data['d'] = self.deckName.encode("utf-8")
else:
data['d'] = None
data.update(args)
data = urllib.urlencode(data)
try:
f = urllib2.urlopen(SYNC_URL + action, data)
except (urllib2.URLError, socket.error, socket.timeout,
httplib.BadStatusLine), e:
            raise SyncError(type="connectionError",
                            exc=repr(e))
ret = f.read()
if not ret:
raise SyncError(type="noResponse")
try:
return self.unstuff(ret)
except Exception, e:
            raise SyncError(type="connectionError",
                            exc=repr(e))
# HTTP server: respond to proxy requests and return data
##########################################################################
class HttpSyncServer(SyncServer):
def __init__(self):
SyncServer.__init__(self)
self.decks = {}
self.deck = None
def summary(self, lastSync):
return self.stuff(SyncServer.summary(
self, float(zlib.decompress(lastSync))))
def applyPayload(self, payload):
return self.stuff(SyncServer.applyPayload(self,
self.unstuff(payload)))
def genOneWayPayload(self, lastSync):
return self.stuff(SyncServer.genOneWayPayload(
self, float(zlib.decompress(lastSync))))
def getDecks(self, libanki, client, sources, pversion):
return self.stuff({
"status": "OK",
"decks": self.decks,
"timestamp": time.time(),
})
def createDeck(self, name):
"Create a deck on the server. Not implemented."
return self.stuff("OK")
# Local media copying
##########################################################################
def copyLocalMedia(src, dst):
srcDir = src.mediaDir()
if not srcDir:
return
dstDir = dst.mediaDir(create=True)
files = os.listdir(srcDir)
# find media references
used = {}
for col in ("question", "answer"):
txt = dst.s.column0("""
select %(c)s from cards where
%(c)s like '%%<img %%'
or %(c)s like '%%[sound:%%'""" % {'c': col})
for entry in txt:
for fname in mediaFiles(entry):
used[fname] = True
# copy only used media
    for fname in files:
        if fname not in used:
            continue
        srcfile = os.path.join(srcDir, fname)
        dstfile = os.path.join(dstDir, fname)
        if not os.path.exists(dstfile):
            try:
                shutil.copy2(srcfile, dstfile)
            except (IOError, OSError):
                pass
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/sync.py
|
sync.py
|
__docformat__ = 'restructuredtext'
import re, os, random, time, types, math, htmlentitydefs, subprocess
try:
import hashlib
md5 = hashlib.md5
except ImportError:
import md5
md5 = md5.new
from oldanki.db import *
from oldanki.lang import _, ngettext
import locale, sys
if sys.version_info[1] < 5:
def format_string(a, b):
return a % b
locale.format_string = format_string
# Time handling
##############################################################################
timeTable = {
"years": lambda n: ngettext("%s year", "%s years", n),
"months": lambda n: ngettext("%s month", "%s months", n),
"days": lambda n: ngettext("%s day", "%s days", n),
"hours": lambda n: ngettext("%s hour", "%s hours", n),
"minutes": lambda n: ngettext("%s minute", "%s minutes", n),
"seconds": lambda n: ngettext("%s second", "%s seconds", n),
}
afterTimeTable = {
"years": lambda n: ngettext("%s year<!--after-->", "%s years<!--after-->", n),
"months": lambda n: ngettext("%s month<!--after-->", "%s months<!--after-->", n),
"days": lambda n: ngettext("%s day<!--after-->", "%s days<!--after-->", n),
"hours": lambda n: ngettext("%s hour<!--after-->", "%s hours<!--after-->", n),
"minutes": lambda n: ngettext("%s minute<!--after-->", "%s minutes<!--after-->", n),
"seconds": lambda n: ngettext("%s second<!--after-->", "%s seconds<!--after-->", n),
}
shortTimeTable = {
"years": _("%sy"),
"months": _("%sm"),
"days": _("%sd"),
"hours": _("%sh"),
"minutes": _("%sm"),
"seconds": _("%ss"),
}
def fmtTimeSpan(time, pad=0, point=0, short=False, after=False):
"Return a string representing a time span (eg '2 days')."
(type, point) = optimalPeriod(time, point)
time = convertSecondsTo(time, type)
if not point:
time = math.floor(time)
if short:
fmt = shortTimeTable[type]
else:
if after:
fmt = afterTimeTable[type](_pluralCount(time, point))
else:
fmt = timeTable[type](_pluralCount(time, point))
timestr = "%(a)d.%(b)df" % {'a': pad, 'b': point}
return locale.format_string("%" + (fmt % timestr), time)
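# Example outputs (C locale assumed; locale.format_string localises the
# decimal separator elsewhere):
#
#   fmtTimeSpan(45)               # -> '45 seconds'
#   fmtTimeSpan(90)               # -> '1 minute'
#   fmtTimeSpan(86400 * 40)       # -> '1.3 months' (point bumped to 1)
#   fmtTimeSpan(30, short=True)   # -> '30s'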
def optimalPeriod(time, point):
if abs(time) < 60:
type = "seconds"
point -= 1
elif abs(time) < 3599:
type = "minutes"
elif abs(time) < 60 * 60 * 24:
type = "hours"
elif abs(time) < 60 * 60 * 24 * 30:
type = "days"
elif abs(time) < 60 * 60 * 24 * 365:
type = "months"
point += 1
else:
type = "years"
point += 1
return (type, max(point, 0))
def convertSecondsTo(seconds, type):
if type == "seconds":
return seconds
elif type == "minutes":
return seconds / 60.0
elif type == "hours":
return seconds / 3600.0
elif type == "days":
return seconds / 86400.0
elif type == "months":
return seconds / 2592000.0
elif type == "years":
return seconds / 31536000.0
assert False
def _pluralCount(time, point):
if point:
return 2
return math.floor(time)
# Locale
##############################################################################
def fmtPercentage(float_value, point=1):
"Return float with percentage sign"
fmt = '%' + "0.%(b)df" % {'b': point}
return locale.format_string(fmt, float_value) + "%"
def fmtFloat(float_value, point=1):
"Return a string with decimal separator according to current locale"
fmt = '%' + "0.%(b)df" % {'b': point}
return locale.format_string(fmt, float_value)
# HTML
##############################################################################
def stripHTML(s):
s = re.sub("(?s)<style.*?>.*?</style>", "", s)
s = re.sub("(?s)<script.*?>.*?</script>", "", s)
s = re.sub("<.*?>", "", s)
s = entsToTxt(s)
return s
def stripHTMLAlt(s):
"Strip HTML, preserving img alt text."
s = re.sub("<img [^>]*alt=[\"']?([^\"'>]+)[\"']?[^>]*>", "\\1", s)
return stripHTML(s)
def stripHTMLMedia(s):
"Strip HTML but keep media filenames"
s = re.sub("<img src=[\"']?([^\"'>]+)[\"']? ?/?>", " \\1 ", s)
return stripHTML(s)
def tidyHTML(html):
"Remove cruft like body tags and return just the important part."
# contents of body - no head or html tags
html = re.sub(u".*<body.*?>(.*)</body></html>",
"\\1", html.replace("\n", u""))
# strip superfluous Qt formatting
html = re.sub(u"(?:-qt-table-type: root; )?"
"margin-top:\d+px; margin-bottom:\d+px; margin-left:\d+px; "
"margin-right:\d+px;(?: -qt-block-indent:0; "
"text-indent:0px;)?", u"", html)
html = re.sub(u"-qt-paragraph-type:empty;", u"", html)
# strip leading space in style statements, and remove if no contents
html = re.sub(u'style=" ', u'style="', html)
html = re.sub(u' style=""', u"", html)
# convert P tags into SPAN and/or BR
html = re.sub(u'<p( style=.+?)>(.*?)</p>', u'<span\\1>\\2</span><br>', html)
html = re.sub(u'<p>(.*?)</p>', u'\\1<br>', html)
html = re.sub(u'<br>$', u'', html)
html = re.sub(u"^<table><tr><td style=\"border: none;\">(.*)<br></td></tr></table>$", u"\\1", html)
# this is being added by qt's html editor, and leads to unwanted spaces
html = re.sub(u"^<p dir='rtl'>(.*?)</p>$", u'\\1', html)
html = re.sub(u"^<br />$", "", html)
return html
def entsToTxt(html):
def fixup(m):
text = m.group(0)
if text[:2] == "&#":
# character reference
try:
if text[:3] == "&#x":
return unichr(int(text[3:-1], 16))
else:
return unichr(int(text[2:-1]))
except ValueError:
pass
else:
# named entity
try:
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
except KeyError:
pass
return text # leave as is
return re.sub("&#?\w+;", fixup, html)
# IDs
##############################################################################
def genID(static=[]):
"Generate a random, unique 64bit ID."
# 23 bits of randomness, 41 bits of current time
# random rather than a counter to ensure efficient btree
t = long(time.time()*1000)
if not static:
static.extend([t, {}])
else:
if static[0] != t:
static[0] = t
static[1] = {}
while 1:
rand = random.getrandbits(23)
if rand not in static[1]:
static[1][rand] = True
break
x = rand << 41 | t
# turn into a signed long
if x >= 9223372036854775808L:
x -= 18446744073709551616L
return x
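# Layout sketch: an id packs a 23-bit random value above a 41-bit
# millisecond timestamp. Decoding one (hypothetical, not in the original):
#
#   x = genID()
#   if x < 0:
#       x += 18446744073709551616L    # undo the signed 64-bit wrap
#   millis = x & ((1L << 41) - 1)     # low 41 bits: creation time in ms
#   rand = x >> 41                    # high 23 bits: per-millisecond nonce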
def hexifyID(id):
if id < 0:
id += 18446744073709551616L
return "%x" % id
def dehexifyID(id):
id = int(id, 16)
if id >= 9223372036854775808L:
id -= 18446744073709551616L
return id
def ids2str(ids):
"""Given a list of integers, return a string '(int1,int2,.)'
The caller is responsible for ensuring only integers are provided.
This is safe if you use sqlite primary key columns, which are guaranteed
to be integers."""
return "(%s)" % ",".join([str(i) for i in ids])
# Tags
##############################################################################
def parseTags(tags):
"Parse a string and return a list of tags."
tags = re.split(" |, ?", tags)
return [t.strip() for t in tags if t.strip()]
def joinTags(tags):
return u" ".join(tags)
def canonifyTags(tags):
"Strip leading/trailing/superfluous commas and duplicates."
tags = [t.lstrip(":") for t in set(parseTags(tags))]
return joinTags(sorted(tags))
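# Examples (illustrative):
#
#   parseTags(u"a, b  c")      # -> [u'a', u'b', u'c']
#   canonifyTags(u"b a, a")    # -> u'a b'  (deduplicated and sorted)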
def findTag(tag, tags):
"True if TAG is in TAGS. Ignore case."
if not isinstance(tags, types.ListType):
tags = parseTags(tags)
return tag.lower() in [t.lower() for t in tags]
def addTags(tagstr, tags):
"Add tags if they don't exist."
currentTags = parseTags(tags)
for tag in parseTags(tagstr):
if not findTag(tag, currentTags):
currentTags.append(tag)
return joinTags(currentTags)
def deleteTags(tagstr, tags):
"Delete tags if they don't exists."
currentTags = parseTags(tags)
for tag in parseTags(tagstr):
try:
currentTags.remove(tag)
except ValueError:
pass
return joinTags(currentTags)
# Misc
##############################################################################
def checksum(data):
return md5(data).hexdigest()
def call(argv, wait=True, **kwargs):
try:
o = subprocess.Popen(argv, **kwargs)
except OSError:
# command not found
return -1
if wait:
while 1:
try:
ret = o.wait()
except OSError:
# interrupted system call
continue
break
else:
ret = 0
return ret
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/utils.py
|
utils.py
|
__docformat__ = 'restructuredtext'
try:
from pysqlite2 import dbapi2 as sqlite
except ImportError:
try:
from sqlite3 import dbapi2 as sqlite
    except ImportError:
raise Exception("Please install pysqlite2 or python2.5")
from sqlalchemy import (Table, Integer, Float, Column, MetaData,
ForeignKey, Boolean, String, Date,
UniqueConstraint, Index, PrimaryKeyConstraint)
from sqlalchemy import create_engine
from sqlalchemy.orm import mapper, sessionmaker as _sessionmaker, relation, backref, \
object_session as _object_session, class_mapper
from sqlalchemy.sql import select, text, and_
from sqlalchemy.exc import DBAPIError, OperationalError
from sqlalchemy.pool import NullPool
import sqlalchemy
# some users are still on 0.4.x..
import warnings
warnings.filterwarnings('ignore', 'Use session.add()')
warnings.filterwarnings('ignore', 'Use session.expunge_all()')
# sqlalchemy didn't handle the move to unicodetext nicely
try:
from sqlalchemy import UnicodeText
except ImportError:
from sqlalchemy import Unicode
UnicodeText = Unicode
from oldanki.hooks import runHook
# shared metadata
metadata = MetaData()
# this class assumes the provided session is called with transactional=False
class SessionHelper(object):
"Add some convenience routines to a session."
def __init__(self, session, lock=False, transaction=True):
self._session = session
self._lock = lock
self._transaction = transaction
if self._transaction:
self._session.begin()
if self._lock:
self._lockDB()
self._seen = True
def save(self, obj):
# compat
if sqlalchemy.__version__.startswith("0.4."):
self._session.save(obj)
else:
self._session.add(obj)
def clear(self):
# compat
if sqlalchemy.__version__.startswith("0.4."):
self._session.clear()
else:
self._session.expunge_all()
def update(self, obj):
# compat
if sqlalchemy.__version__.startswith("0.4."):
self._session.update(obj)
else:
self._session.add(obj)
def execute(self, *a, **ka):
x = self._session.execute(*a, **ka)
runHook("dbFinished")
return x
def __getattr__(self, k):
return getattr(self.__dict__['_session'], k)
def scalar(self, sql, **args):
return self.execute(text(sql), args).scalar()
def all(self, sql, **args):
return self.execute(text(sql), args).fetchall()
def first(self, sql, **args):
c = self.execute(text(sql), args)
r = c.fetchone()
c.close()
return r
def column0(self, sql, **args):
return [x[0] for x in self.execute(text(sql), args).fetchall()]
def statement(self, sql, **kwargs):
"Execute a statement without returning any results. Flush first."
return self.execute(text(sql), kwargs)
def statements(self, sql, data):
"Execute a statement across data. Flush first."
return self.execute(text(sql), data)
def __repr__(self):
return repr(self._session)
def commit(self):
self._session.commit()
if self._transaction:
self._session.begin()
if self._lock:
self._lockDB()
def _lockDB(self):
"Take out a write lock."
self._session.execute(text("update decks set modified=modified"))
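    # Usage sketch (illustrative; decks construct these internally, and
    # sessionFactory below stands for a sessionmaker() result):
    #
    #   s = SessionHelper(sessionFactory())
    #   n = s.scalar("select count() from cards")
    #   s.statement("update decks set modified = :m", m=42.0)
    #   s.commit()    # commits, then re-opens the wrapped transaction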
def object_session(*args):
s = _object_session(*args)
if s:
return SessionHelper(s, transaction=False)
return None
def sessionmaker(*args, **kwargs):
if sqlalchemy.__version__ < "0.5":
if 'autocommit' in kwargs:
kwargs['transactional'] = not kwargs['autocommit']
del kwargs['autocommit']
else:
if 'transactional' in kwargs:
kwargs['autocommit'] = not kwargs['transactional']
del kwargs['transactional']
return _sessionmaker(*args, **kwargs)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/db.py
|
db.py
|
__docformat__ = 'restructuredtext'
import time
from oldanki.db import *
from oldanki.errors import *
from oldanki.models import Model, FieldModel, fieldModelsTable
from oldanki.utils import genID, stripHTMLMedia
from oldanki.hooks import runHook
# Fields in a fact
##########################################################################
fieldsTable = Table(
'fields', metadata,
Column('id', Integer, primary_key=True),
Column('factId', Integer, ForeignKey("facts.id"), nullable=False),
Column('fieldModelId', Integer, ForeignKey("fieldModels.id"),
nullable=False),
Column('ordinal', Integer, nullable=False),
Column('value', UnicodeText, nullable=False))
class Field(object):
"A field in a fact."
def __init__(self, fieldModel=None):
if fieldModel:
self.fieldModel = fieldModel
self.ordinal = fieldModel.ordinal
self.value = u""
self.id = genID()
def getName(self):
return self.fieldModel.name
name = property(getName)
mapper(Field, fieldsTable, properties={
'fieldModel': relation(FieldModel)
})
# Facts: a set of fields and a model
##########################################################################
# mapped in cards.py
factsTable = Table(
'facts', metadata,
Column('id', Integer, primary_key=True),
Column('modelId', Integer, ForeignKey("models.id"), nullable=False),
Column('created', Float, nullable=False, default=time.time),
Column('modified', Float, nullable=False, default=time.time),
Column('tags', UnicodeText, nullable=False, default=u""),
# spaceUntil is reused as a html-stripped cache of the fields
Column('spaceUntil', UnicodeText, nullable=False, default=u""),
# obsolete
Column('lastCardId', Integer, ForeignKey(
"cards.id", use_alter=True, name="lastCardIdfk")))
class Fact(object):
"A single fact. Fields exposed as dict interface."
def __init__(self, model=None):
self.model = model
self.id = genID()
if model:
for fm in model.fieldModels:
self.fields.append(Field(fm))
self.new = True
def isNew(self):
return getattr(self, 'new', False)
def keys(self):
return [field.name for field in self.fields]
def values(self):
return [field.value for field in self.fields]
def __getitem__(self, key):
try:
return [f.value for f in self.fields if f.name == key][0]
except IndexError:
raise KeyError(key)
def __setitem__(self, key, value):
try:
[f for f in self.fields if f.name == key][0].value = value
except IndexError:
raise KeyError
def get(self, key, default):
try:
return self[key]
except (IndexError, KeyError):
return default
def assertValid(self):
"Raise an error if required fields are empty."
for field in self.fields:
if not self.fieldValid(field):
raise FactInvalidError(type="fieldEmpty",
field=field.name)
def fieldValid(self, field):
return not (field.fieldModel.required and not field.value.strip())
def assertUnique(self, s):
"Raise an error if duplicate fields are found."
for field in self.fields:
if not self.fieldUnique(field, s):
raise FactInvalidError(type="fieldNotUnique",
field=field.name)
def fieldUnique(self, field, s):
if not field.fieldModel.unique:
return True
req = ("select value from fields "
"where fieldModelId = :fmid and value = :val")
if field.id:
req += " and id != %s" % field.id
return not s.scalar(req, val=field.value, fmid=field.fieldModel.id)
def focusLost(self, field):
runHook('fact.focusLost', self, field)
def setModified(self, textChanged=False, deck=None, media=True):
"Mark modified and update cards."
self.modified = time.time()
if textChanged:
if not deck:
# FIXME: compat code
import ankiqt
if not getattr(ankiqt, 'setModWarningShown', None):
import sys; sys.stderr.write(
"plugin needs to pass deck to fact.setModified()")
ankiqt.setModWarningShown = True
deck = ankiqt.mw.deck
assert deck
self.spaceUntil = stripHTMLMedia(u" ".join(
self.values()))
for card in self.cards:
card.rebuildQA(deck)
# Fact deletions
##########################################################################
factsDeletedTable = Table(
'factsDeleted', metadata,
Column('factId', Integer, ForeignKey("facts.id"),
nullable=False),
Column('deletedTime', Float, nullable=False))
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/facts.py
|
facts.py
|
__docformat__ = 'restructuredtext'
import itertools, time, re, os, HTMLParser
from operator import itemgetter
from oldanki import DeckStorage
from oldanki.cards import Card
from oldanki.sync import SyncClient, SyncServer, copyLocalMedia
from oldanki.lang import _
from oldanki.utils import findTag, parseTags, stripHTML, ids2str
from oldanki.tags import tagIds
from oldanki.db import *
class Exporter(object):
def __init__(self, deck):
self.deck = deck
self.limitTags = []
self.limitCardIds = []
def exportInto(self, path):
self._escapeCount = 0
        f = open(path, "wb")
        self.doExport(f)
        f.close()
def escapeText(self, text, removeFields=False):
"Escape newlines and tabs, and strip Anki HTML."
from BeautifulSoup import BeautifulSoup as BS
text = text.replace("\n", "<br>")
text = text.replace("\t", " " * 8)
if removeFields:
# beautifulsoup is slow
self._escapeCount += 1
if self._escapeCount % 100 == 0:
self.deck.updateProgress()
try:
s = BS(text)
all = s('span', {'class': re.compile("fm.*")})
for e in all:
e.replaceWith("".join([unicode(x) for x in e.contents]))
text = unicode(s)
except HTMLParser.HTMLParseError:
pass
return text
def cardIds(self):
"Return all cards, limited by tags or provided ids."
if self.limitCardIds:
return self.limitCardIds
if not self.limitTags:
cards = self.deck.s.column0("select id from cards")
else:
d = tagIds(self.deck.s, self.limitTags, create=False)
cards = self.deck.s.column0(
"select cardId from cardTags where tagid in %s" %
ids2str(d.values()))
self.count = len(cards)
return cards
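# Usage sketch (illustrative only; assumes an open deck): escapeText()
# flattens text so embedded newlines and tabs cannot break a tab-separated
# row, and with removeFields=True it strips the per-field
# <span class="fm..."> wrappers that formatQA adds:
#
#   e = Exporter(deck)
#   e._escapeCount = 0
#   e.escapeText(u"line1\nline2")    # -> u"line1<br>line2"
#   e.escapeText(u'<span class="fm1a2b">hi</span>', removeFields=True)
#   # -> u"hi"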
class AnkiExporter(Exporter):
key = _("Anki Deck (*.oldanki)")
ext = ".oldanki"
def __init__(self, deck):
Exporter.__init__(self, deck)
self.includeSchedulingInfo = False
self.includeMedia = True
def exportInto(self, path):
n = 3
if not self.includeSchedulingInfo:
n += 1
self.deck.startProgress(n)
self.deck.updateProgress(_("Exporting..."))
try:
os.unlink(path)
except (IOError, OSError):
pass
self.newDeck = DeckStorage.Deck(path)
client = SyncClient(self.deck)
server = SyncServer(self.newDeck)
client.setServer(server)
client.localTime = self.deck.modified
client.remoteTime = 0
self.deck.s.flush()
# set up a custom change list and sync
lsum = self.localSummary()
rsum = server.summary(0)
self.deck.updateProgress()
payload = client.genPayload((lsum, rsum))
self.deck.updateProgress()
res = server.applyPayload(payload)
if not self.includeSchedulingInfo:
self.deck.updateProgress()
self.newDeck.s.statement("""
delete from reviewHistory""")
self.newDeck.s.statement("""
update cards set
interval = 0,
lastInterval = 0,
due = created,
lastDue = 0,
factor = 2.5,
firstAnswered = 0,
reps = 0,
successive = 0,
averageTime = 0,
reviewTime = 0,
youngEase0 = 0,
youngEase1 = 0,
youngEase2 = 0,
youngEase3 = 0,
youngEase4 = 0,
matureEase0 = 0,
matureEase1 = 0,
matureEase2 = 0,
matureEase3 = 0,
matureEase4 = 0,
yesCount = 0,
noCount = 0,
spaceUntil = 0,
type = 2,
relativeDelay = 2,
combinedDue = created,
modified = :now
""", now=time.time())
self.newDeck.s.statement("""
delete from stats""")
# media
if self.includeMedia:
server.deck.mediaPrefix = ""
copyLocalMedia(client.deck, server.deck)
# need to save manually
self.newDeck.rebuildCounts()
self.newDeck.updateAllPriorities()
self.exportedCards = self.newDeck.cardCount
self.newDeck.utcOffset = -1
self.newDeck.s.commit()
self.newDeck.close()
self.deck.finishProgress()
def localSummary(self):
cardIds = self.cardIds()
cStrIds = ids2str(cardIds)
cards = self.deck.s.all("""
select id, modified from cards
where id in %s""" % cStrIds)
facts = self.deck.s.all("""
select facts.id, facts.modified from cards, facts where
facts.id = cards.factId and
cards.id in %s""" % cStrIds)
models = self.deck.s.all("""
select models.id, models.modified from models, facts where
facts.modelId = models.id and
facts.id in %s""" % ids2str([f[0] for f in facts]))
media = self.deck.s.all("""
select id, created from media""")
return {
# cards
"cards": cards,
"delcards": [],
# facts
"facts": facts,
"delfacts": [],
# models
"models": models,
"delmodels": [],
# media
"media": media,
"delmedia": [],
}
class TextCardExporter(Exporter):
key = _("Text files (*.txt)")
ext = ".txt"
def __init__(self, deck):
Exporter.__init__(self, deck)
self.includeTags = False
def doExport(self, file):
ids = self.cardIds()
strids = ids2str(ids)
self.deck.startProgress((len(ids) + 1) / 50)
self.deck.updateProgress(_("Exporting..."))
cards = self.deck.s.all("""
select cards.question, cards.answer, cards.id from cards
where cards.id in %s
order by cards.created""" % strids)
self.deck.updateProgress()
if self.includeTags:
self.cardTags = dict(self.deck.s.all("""
select cards.id, facts.tags from cards, facts
where cards.factId = facts.id
and cards.id in %s
order by cards.created""" % strids))
out = u"\n".join(["%s\t%s%s" % (
self.escapeText(c[0], removeFields=True),
self.escapeText(c[1], removeFields=True),
self.tags(c[2]))
for c in cards])
if out:
out += "\n"
file.write(out.encode("utf-8"))
self.deck.finishProgress()
def tags(self, id):
if self.includeTags:
return "\t" + ", ".join(parseTags(self.cardTags[id]))
return ""
class TextFactExporter(Exporter):
key = _("Text files (*.txt)")
ext = ".txt"
def __init__(self, deck):
Exporter.__init__(self, deck)
self.includeTags = False
def doExport(self, file):
cardIds = self.cardIds()
self.deck.startProgress()
self.deck.updateProgress(_("Exporting..."))
facts = self.deck.s.all("""
select factId, value, facts.created from facts, fields
where
facts.id in
(select distinct factId from cards
where cards.id in %s)
and facts.id = fields.factId
order by factId, ordinal""" % ids2str(cardIds))
txt = ""
self.deck.updateProgress()
if self.includeTags:
self.factTags = dict(self.deck.s.all(
"select id, tags from facts where id in %s" %
ids2str([fact[0] for fact in facts])))
groups = itertools.groupby(facts, itemgetter(0))
groups = [[x for x in y[1]] for y in groups]
groups = [(group[0][2],
"\t".join([self.escapeText(x[1]) for x in group]) +
self.tags(group[0][0]))
for group in groups]
self.deck.updateProgress()
groups.sort(key=itemgetter(0))
out = [ret[1] for ret in groups]
self.count = len(out)
out = "\n".join(out)
file.write(out.encode("utf-8"))
self.deck.finishProgress()
def tags(self, id):
if self.includeTags:
return "\t" + self.factTags[id]
return ""
# Export modules
##########################################################################
def exporters():
return (
(_("Anki Deck (*.oldanki)"), AnkiExporter),
(_("Cards in tab-separated text file (*.txt)"), TextCardExporter),
(_("Facts in tab-separated text file (*.txt)"), TextFactExporter))
# ---- end of file: /AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/exporting.py (package: AnkiServer) ----
import time, re
from sqlalchemy.ext.orderinglist import ordering_list
from oldanki.db import *
from oldanki.utils import genID, canonifyTags
from oldanki.fonts import toPlatformFont
from oldanki.utils import parseTags, hexifyID, checksum, stripHTML
from oldanki.lang import _
from oldanki.hooks import runFilter
from oldanki.template import render
from copy import copy
def alignmentLabels():
return {
0: _("Center"),
1: _("Left"),
2: _("Right"),
}
# Field models
##########################################################################
fieldModelsTable = Table(
'fieldModels', metadata,
Column('id', Integer, primary_key=True),
Column('ordinal', Integer, nullable=False),
Column('modelId', Integer, ForeignKey('models.id'), nullable=False),
Column('name', UnicodeText, nullable=False),
Column('description', UnicodeText, nullable=False, default=u""), # obsolete
# reused as RTL marker
Column('features', UnicodeText, nullable=False, default=u""),
Column('required', Boolean, nullable=False, default=True),
Column('unique', Boolean, nullable=False, default=True), # sqlite keyword
Column('numeric', Boolean, nullable=False, default=False),
# display
Column('quizFontFamily', UnicodeText, default=u"Arial"),
Column('quizFontSize', Integer, default=20),
Column('quizFontColour', String(7)),
Column('editFontFamily', UnicodeText, default=u"1"), # reused as <pre> toggle
Column('editFontSize', Integer, default=20))
class FieldModel(object):
"The definition of one field in a fact."
def __init__(self, name=u"", required=True, unique=True):
self.name = name
self.required = required
self.unique = unique
self.id = genID()
def copy(self):
new = FieldModel()
for p in class_mapper(FieldModel).iterate_properties:
setattr(new, p.key, getattr(self, p.key))
new.id = genID()
new.model = None
return new
mapper(FieldModel, fieldModelsTable)
# Card models
##########################################################################
cardModelsTable = Table(
'cardModels', metadata,
Column('id', Integer, primary_key=True),
Column('ordinal', Integer, nullable=False),
Column('modelId', Integer, ForeignKey('models.id'), nullable=False),
Column('name', UnicodeText, nullable=False),
Column('description', UnicodeText, nullable=False, default=u""), # obsolete
Column('active', Boolean, nullable=False, default=True),
# formats: question/answer/last(not used)
Column('qformat', UnicodeText, nullable=False),
Column('aformat', UnicodeText, nullable=False),
Column('lformat', UnicodeText),
# question/answer editor format (not used yet)
Column('qedformat', UnicodeText),
Column('aedformat', UnicodeText),
Column('questionInAnswer', Boolean, nullable=False, default=False),
# unused
Column('questionFontFamily', UnicodeText, default=u"Arial"),
Column('questionFontSize', Integer, default=20),
Column('questionFontColour', String(7), default=u"#000000"),
# used for both question & answer
Column('questionAlign', Integer, default=0),
# unused
Column('answerFontFamily', UnicodeText, default=u"Arial"),
Column('answerFontSize', Integer, default=20),
Column('answerFontColour', String(7), default=u"#000000"),
Column('answerAlign', Integer, default=0),
Column('lastFontFamily', UnicodeText, default=u"Arial"),
Column('lastFontSize', Integer, default=20),
# used as background colour
Column('lastFontColour', String(7), default=u"#FFFFFF"),
Column('editQuestionFontFamily', UnicodeText, default=None),
Column('editQuestionFontSize', Integer, default=None),
Column('editAnswerFontFamily', UnicodeText, default=None),
Column('editAnswerFontSize', Integer, default=None),
# empty answer
Column('allowEmptyAnswer', Boolean, nullable=False, default=True),
Column('typeAnswer', UnicodeText, nullable=False, default=u""))
class CardModel(object):
"""Represents how to generate the front and back of a card."""
def __init__(self, name=u"", qformat=u"q", aformat=u"a", active=True):
self.name = name
self.qformat = qformat
self.aformat = aformat
self.active = active
self.id = genID()
def copy(self):
new = CardModel()
for p in class_mapper(CardModel).iterate_properties:
setattr(new, p.key, getattr(self, p.key))
new.id = genID()
new.model = None
return new
mapper(CardModel, cardModelsTable)
def formatQA(cid, mid, fact, tags, cm, deck):
"Return a dict of {id, question, answer}"
d = {'id': cid}
fields = {}
for (k, v) in fact.items():
fields["text:"+k] = stripHTML(v[1])
if v[1]:
fields[k] = '<span class="fm%s">%s</span>' % (
hexifyID(v[0]), v[1])
else:
fields[k] = u""
fields['tags'] = tags[0]
fields['Tags'] = tags[0]
fields['modelTags'] = tags[1]
fields['cardModel'] = tags[2]
# render q & a
ret = []
for (type, format) in (("question", cm.qformat),
("answer", cm.aformat)):
# convert old style
format = re.sub("%\((.+?)\)s", "{{\\1}}", format)
# allow custom rendering functions & info
fields = runFilter("prepareFields", fields, cid, mid, fact, tags, cm, deck)
html = render(format, fields)
d[type] = runFilter("formatQA", html, type, cid, mid, fact, tags, cm, deck)
return d
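def _example_format_upgrade():
    # Illustrative sketch only; not part of the original module. Shows the
    # legacy-placeholder upgrade that formatQA applies before rendering.
    fmt = re.sub("%\((.+?)\)s", "{{\\1}}", u"%(Front)s / %(Back)s")
    assert fmt == u"{{Front}} / {{Back}}"
    return fmt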
# Model table
##########################################################################
modelsTable = Table(
'models', metadata,
Column('id', Integer, primary_key=True),
Column('deckId', Integer, ForeignKey("decks.id", use_alter=True, name="deckIdfk")),
Column('created', Float, nullable=False, default=time.time),
Column('modified', Float, nullable=False, default=time.time),
Column('tags', UnicodeText, nullable=False, default=u""),
Column('name', UnicodeText, nullable=False),
Column('description', UnicodeText, nullable=False, default=u""), # obsolete
Column('features', UnicodeText, nullable=False, default=u""), # used as mediaURL
Column('spacing', Float, nullable=False, default=0.1), # obsolete
Column('initialSpacing', Float, nullable=False, default=60), # obsolete
Column('source', Integer, nullable=False, default=0))
class Model(object):
"Defines the way a fact behaves, what fields it can contain, etc."
def __init__(self, name=u""):
self.name = name
self.id = genID()
def setModified(self):
self.modified = time.time()
def addFieldModel(self, field):
"Add a field model."
self.fieldModels.append(field)
s = object_session(self)
if s:
s.flush()
def addCardModel(self, card):
"Add a card model."
self.cardModels.append(card)
s = object_session(self)
if s:
s.flush()
mapper(Model, modelsTable, properties={
'fieldModels': relation(FieldModel, backref='model',
collection_class=ordering_list('ordinal'),
order_by=[fieldModelsTable.c.ordinal],
cascade="all, delete-orphan"),
'cardModels': relation(CardModel, backref='model',
collection_class=ordering_list('ordinal'),
order_by=[cardModelsTable.c.ordinal],
cascade="all, delete-orphan"),
})
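def _example_basic_model():
    # Construction sketch (illustrative only; not part of the original
    # module): field and card models can be assembled before the model is
    # attached to a deck session; addFieldModel/addCardModel only flush
    # when a session is present.
    m = Model(u"Basic")
    m.addFieldModel(FieldModel(u"Front", required=True, unique=True))
    m.addFieldModel(FieldModel(u"Back", required=False, unique=False))
    m.addCardModel(CardModel(u"Forward", qformat=u"{{Front}}", aformat=u"{{Back}}"))
    return m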
# Model deletions
##########################################################################
modelsDeletedTable = Table(
'modelsDeleted', metadata,
Column('modelId', Integer, ForeignKey("models.id"),
nullable=False),
Column('deletedTime', Float, nullable=False))
# ---- end of file: /AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/models.py (package: AnkiServer) ----
__docformat__ = 'restructuredtext'
import os, shutil, re, urllib2, time, tempfile, unicodedata, urllib
from oldanki.db import *
from oldanki.utils import checksum, genID
from oldanki.lang import _
# other code depends on this order, so don't reorder
regexps = ("(?i)(\[sound:([^]]+)\])",
"(?i)(<img[^>]+src=[\"']?([^\"'>]+)[\"']?[^>]*>)")
# Tables
##########################################################################
mediaTable = Table(
'media', metadata,
Column('id', Integer, primary_key=True, nullable=False),
Column('filename', UnicodeText, nullable=False),
# reused as reference count
Column('size', Integer, nullable=False),
# treated as modification date, not creation date
Column('created', Float, nullable=False),
# reused as md5sum. empty string if file doesn't exist on disk
Column('originalPath', UnicodeText, nullable=False, default=u""),
# older versions stored original filename here, so we'll leave it for now
# in case we add a feature to rename media back to its original name. in
# the future we may want to zero this to save space
Column('description', UnicodeText, nullable=False, default=u""))
class Media(object):
pass
mapper(Media, mediaTable)
mediaDeletedTable = Table(
'mediaDeleted', metadata,
Column('mediaId', Integer, ForeignKey("cards.id"),
nullable=False),
Column('deletedTime', Float, nullable=False))
# File handling
##########################################################################
def copyToMedia(deck, path):
"""Copy PATH to MEDIADIR, and return new filename.
If a file with the same md5sum exists in the DB, return that.
If a file with the same name exists, return a unique name.
This does not modify the media table."""
# see if have duplicate contents
newpath = deck.s.scalar(
"select filename from media where originalPath = :cs",
cs=checksum(open(path, "rb").read()))
# check if this filename already exists
if not newpath:
base = os.path.basename(path)
mdir = deck.mediaDir(create=True)
newpath = uniquePath(mdir, base)
shutil.copy2(path, newpath)
return os.path.basename(newpath)
def uniquePath(dir, base):
# remove any dangerous characters
base = re.sub(r"[][<>:/\\&]", "", base)
# find a unique name
(root, ext) = os.path.splitext(base)
def repl(match):
n = int(match.group(1))
return " (%d)" % (n+1)
while True:
path = os.path.join(dir, root + ext)
if not os.path.exists(path):
break
reg = " \((\d+)\)$"
if not re.search(reg, root):
root = root + " (1)"
else:
root = re.sub(reg, repl, root)
return path
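def _example_unique_path():
    # Illustrative sketch only; not part of the original module. Shows the
    # " (n)" suffix bumping against a real (temporary) directory.
    d = tempfile.mkdtemp()
    first = uniquePath(d, "photo.jpg")     # no clash -> .../photo.jpg
    open(first, "wb").close()
    second = uniquePath(d, "photo.jpg")    # clash -> .../photo (1).jpg
    return first, second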
# DB routines
##########################################################################
def updateMediaCount(deck, file, count=1):
mdir = deck.mediaDir()
if deck.s.scalar(
"select 1 from media where filename = :file", file=file):
deck.s.statement(
"update media set size = size + :c, created = :t where filename = :file",
file=file, c=count, t=time.time())
elif count > 0:
try:
sum = unicode(
checksum(open(os.path.join(mdir, file), "rb").read()))
except:
sum = u""
deck.s.statement("""
insert into media (id, filename, size, created, originalPath, description)
values (:id, :file, :c, :mod, :sum, '')""",
id=genID(), file=file, c=count, mod=time.time(),
sum=sum)
def removeUnusedMedia(deck):
ids = deck.s.column0("select id from media where size = 0")
for id in ids:
deck.s.statement("insert into mediaDeleted values (:id, :t)",
id=id, t=time.time())
deck.s.statement("delete from media where size = 0")
# String manipulation
##########################################################################
def mediaFiles(string, remote=False):
l = []
for reg in regexps:
for (full, fname) in re.findall(reg, string):
isLocal = not re.match("(https?|ftp)://", fname.lower())
if not remote and isLocal:
l.append(fname)
elif remote and not isLocal:
l.append(fname)
return l
def stripMedia(txt):
for reg in regexps:
txt = re.sub(reg, "", txt)
return txt
def escapeImages(string):
def repl(match):
tag = match.group(1)
fname = match.group(2)
if re.match("(https?|ftp)://", fname):
return tag
return tag.replace(
fname, urllib.quote(fname.encode("utf-8")))
return re.sub(regexps[1], repl, string)
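def _example_media_refs():
    # Illustrative sketch only; not part of the original module. The sound
    # regexp is applied first, so local matches come back sound-before-image.
    txt = u'<img src="a.png"> [sound:b.mp3] <img src="http://x/c.png">'
    assert mediaFiles(txt) == [u"b.mp3", u"a.png"]
    assert mediaFiles(txt, remote=True) == [u"http://x/c.png"]
    return stripMedia(txt)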
# Rebuilding DB
##########################################################################
def rebuildMediaDir(deck, delete=False, dirty=True):
mdir = deck.mediaDir()
if not mdir:
return (0, 0)
deck.startProgress(title=_("Check Media DB"))
# set all ref counts to 0
deck.s.statement("update media set size = 0")
# look through cards for media references
refs = {}
normrefs = {}
def norm(s):
if isinstance(s, unicode):
return unicodedata.normalize('NFD', s)
return s
for (question, answer) in deck.s.all(
"select question, answer from cards"):
for txt in (question, answer):
for f in mediaFiles(txt):
if f in refs:
refs[f] += 1
else:
refs[f] = 1
normrefs[norm(f)] = True
# update ref counts
for (file, count) in refs.items():
updateMediaCount(deck, file, count)
# find unused media
unused = []
for file in os.listdir(mdir):
path = os.path.join(mdir, file)
if not os.path.isfile(path):
# ignore directories
continue
nfile = norm(file)
if nfile not in normrefs:
unused.append(file)
# optionally delete
if delete:
for f in unused:
path = os.path.join(mdir, f)
os.unlink(path)
# remove entries in db for unused media
removeUnusedMedia(deck)
# check md5s are up to date
update = []
for (file, created, md5) in deck.s.all(
"select filename, created, originalPath from media"):
path = os.path.join(mdir, file)
if not os.path.exists(path):
if md5:
update.append({'f':file, 'sum':u"", 'c':time.time()})
else:
sum = unicode(
checksum(open(os.path.join(mdir, file), "rb").read()))
if md5 != sum:
update.append({'f':file, 'sum':sum, 'c':time.time()})
if update:
deck.s.statements("""
update media set originalPath = :sum, created = :c where filename = :f""",
update)
# update deck and get return info
if dirty:
deck.flushMod()
nohave = deck.s.column0("select filename from media where originalPath = ''")
deck.finishProgress()
return (nohave, unused)
# Download missing
##########################################################################
def downloadMissing(deck):
urlbase = deck.getVar("mediaURL")
if not urlbase:
return None
mdir = deck.mediaDir(create=True)
deck.startProgress()
missing = 0
grabbed = 0
for c, (f, sum) in enumerate(deck.s.all(
"select filename, originalPath from media")):
path = os.path.join(mdir, f)
if not os.path.exists(path):
try:
rpath = urlbase + f
url = urllib2.urlopen(rpath)
open(path, "wb").write(url.read())
grabbed += 1
except:
if sum:
# the file is supposed to exist
deck.finishProgress()
return (False, rpath)
else:
# ignore and keep going
missing += 1
deck.updateProgress(label=_("File %d...") % (grabbed+missing))
deck.finishProgress()
return (True, grabbed, missing)
# Convert remote links to local ones
##########################################################################
def downloadRemote(deck):
mdir = deck.mediaDir(create=True)
refs = {}
deck.startProgress()
for (question, answer) in deck.s.all(
"select question, answer from cards"):
for txt in (question, answer):
for f in mediaFiles(txt, remote=True):
refs[f] = True
tmpdir = tempfile.mkdtemp(prefix="oldanki")
failed = []
passed = []
for c, link in enumerate(refs.keys()):
try:
path = os.path.join(tmpdir, os.path.basename(link))
url = urllib2.urlopen(link)
open(path, "wb").write(url.read())
newpath = copyToMedia(deck, path)
passed.append([link, newpath])
except:
failed.append(link)
deck.updateProgress(label=_("Download %d...") % c)
for (url, name) in passed:
deck.s.statement(
"update fields set value = replace(value, :url, :name)",
url=url, name=name)
deck.updateProgress(label=_("Updating references..."))
deck.updateProgress(label=_("Updating cards..."))
# rebuild entire q/a cache
for m in deck.models:
deck.updateCardsFromModel(m, dirty=True)
deck.finishProgress()
deck.flushMod()
return (passed, failed)
# ---- end of file: /AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/media.py (package: AnkiServer) ----
__docformat__ = 'restructuredtext'
import tempfile, time, os, random, sys, re, stat, shutil
import types, traceback, datetime
from anki.utils import json as simplejson
from oldanki.db import *
from oldanki.lang import _, ngettext
from oldanki.errors import DeckAccessError
from oldanki.stdmodels import BasicModel
from oldanki.utils import parseTags, tidyHTML, genID, ids2str, hexifyID, \
canonifyTags, joinTags, addTags, checksum
from oldanki.history import CardHistoryEntry
from oldanki.models import Model, CardModel, formatQA
from oldanki.stats import dailyStats, globalStats, genToday
from oldanki.fonts import toPlatformFont
from oldanki.tags import initTagTables, tagIds
from operator import itemgetter
from itertools import groupby
from oldanki.hooks import runHook, hookEmpty
from oldanki.template import render
from oldanki.media import updateMediaCount, mediaFiles, \
rebuildMediaDir
import oldanki.latex # sets up hook
# ensure all the DB metadata in other files is loaded before proceeding
import oldanki.models, oldanki.facts, oldanki.cards, oldanki.stats
import oldanki.history, oldanki.media
# the current code set type -= 3 for manually suspended cards, and += 3*n
# for temporary suspends, (where n=1 for bury, n=2 for review/cram).
# This way we don't need to recalculate priorities when enabling the cards
# again, and paves the way for an arbitrary number of priorities in the
# future. But until all clients are upgraded, we need to keep munging the
# priorities to prevent older clients from getting confused
# PRIORITY_REVEARLY = -1
# PRIORITY_BURIED = -2
# PRIORITY_SUSPENDED = -3
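# Worked example (illustrative): under this scheme a review card (type 1)
# becomes -2 when manually suspended (1 - 3), 4 when buried (1 + 3*1) and
# 7 during review early/cram (1 + 3*2); adding or subtracting the same
# multiple of 3 restores the original queue without recalculating
# priorities.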
# priorities
PRIORITY_HIGH = 4
PRIORITY_MED = 3
PRIORITY_NORM = 2
PRIORITY_LOW = 1
PRIORITY_NONE = 0
# rest
MATURE_THRESHOLD = 21
NEW_CARDS_DISTRIBUTE = 0
NEW_CARDS_LAST = 1
NEW_CARDS_FIRST = 2
NEW_CARDS_RANDOM = 0
NEW_CARDS_OLD_FIRST = 1
NEW_CARDS_NEW_FIRST = 2
REV_CARDS_OLD_FIRST = 0
REV_CARDS_NEW_FIRST = 1
REV_CARDS_DUE_FIRST = 2
REV_CARDS_RANDOM = 3
SEARCH_TAG = 0
SEARCH_TYPE = 1
SEARCH_PHRASE = 2
SEARCH_FID = 3
SEARCH_CARD = 4
SEARCH_DISTINCT = 5
SEARCH_FIELD = 6
SEARCH_FIELD_EXISTS = 7
SEARCH_QA = 8
SEARCH_PHRASE_WB = 9
DECK_VERSION = 65
deckVarsTable = Table(
'deckVars', metadata,
Column('key', UnicodeText, nullable=False, primary_key=True),
Column('value', UnicodeText))
# parts of the code assume we only have one deck
decksTable = Table(
'decks', metadata,
Column('id', Integer, primary_key=True),
Column('created', Float, nullable=False, default=time.time),
Column('modified', Float, nullable=False, default=time.time),
Column('description', UnicodeText, nullable=False, default=u""),
Column('version', Integer, nullable=False, default=DECK_VERSION),
Column('currentModelId', Integer, ForeignKey("models.id")),
# syncName stores an md5sum of the deck path when syncing is enabled. If
# it doesn't match the current deck path, the deck has been moved,
# and syncing is disabled on load.
Column('syncName', UnicodeText),
Column('lastSync', Float, nullable=False, default=0),
# scheduling
##############
# initial intervals
Column('hardIntervalMin', Float, nullable=False, default=1.0),
Column('hardIntervalMax', Float, nullable=False, default=1.1),
Column('midIntervalMin', Float, nullable=False, default=3.0),
Column('midIntervalMax', Float, nullable=False, default=5.0),
Column('easyIntervalMin', Float, nullable=False, default=7.0),
Column('easyIntervalMax', Float, nullable=False, default=9.0),
# delays on failure
Column('delay0', Integer, nullable=False, default=600),
# days to delay mature fails
Column('delay1', Integer, nullable=False, default=0),
Column('delay2', Float, nullable=False, default=0.0),
# collapsing future cards
Column('collapseTime', Integer, nullable=False, default=1),
# priorities & postponing
Column('highPriority', UnicodeText, nullable=False, default=u"PriorityVeryHigh"),
Column('medPriority', UnicodeText, nullable=False, default=u"PriorityHigh"),
Column('lowPriority', UnicodeText, nullable=False, default=u"PriorityLow"),
Column('suspended', UnicodeText, nullable=False, default=u""), # obsolete
# 0 is random, 1 is by input date
Column('newCardOrder', Integer, nullable=False, default=1),
# when to show new cards
Column('newCardSpacing', Integer, nullable=False, default=NEW_CARDS_DISTRIBUTE),
# limit the number of failed cards in play
Column('failedCardMax', Integer, nullable=False, default=20),
# number of new cards to show per day
Column('newCardsPerDay', Integer, nullable=False, default=20),
# currently unused
Column('sessionRepLimit', Integer, nullable=False, default=0),
Column('sessionTimeLimit', Integer, nullable=False, default=600),
# stats offset
Column('utcOffset', Float, nullable=False, default=-1),
# count cache
Column('cardCount', Integer, nullable=False, default=0),
Column('factCount', Integer, nullable=False, default=0),
Column('failedNowCount', Integer, nullable=False, default=0), # obsolete
Column('failedSoonCount', Integer, nullable=False, default=0),
Column('revCount', Integer, nullable=False, default=0),
Column('newCount', Integer, nullable=False, default=0),
# rev order
Column('revCardOrder', Integer, nullable=False, default=0))
class Deck(object):
"Top-level object. Manages facts, cards and scheduling information."
factorFour = 1.3
initialFactor = 2.5
minimumAverage = 1.7
maxScheduleTime = 36500
def __init__(self, path=None):
"Create a new deck."
# a limit of 1 deck in the table
self.id = 1
# db session factory and instance
self.Session = None
self.s = None
def _initVars(self):
self.tmpMediaDir = None
self.mediaPrefix = ""
self.lastTags = u""
self.lastLoaded = time.time()
self.undoEnabled = False
self.sessionStartReps = 0
self.sessionStartTime = 0
self.lastSessionStart = 0
self.queueLimit = 200
# if most recent deck var not defined, make sure defaults are set
if not self.s.scalar("select 1 from deckVars where key = 'revSpacing'"):
self.setVarDefault("suspendLeeches", True)
self.setVarDefault("leechFails", 16)
self.setVarDefault("perDay", True)
self.setVarDefault("newActive", "")
self.setVarDefault("revActive", "")
self.setVarDefault("newInactive", self.suspended)
self.setVarDefault("revInactive", self.suspended)
self.setVarDefault("newSpacing", 60)
self.setVarDefault("mediaURL", "")
self.setVarDefault("latexPre", """\
\\documentclass[12pt]{article}
\\special{papersize=3in,5in}
\\usepackage[utf8]{inputenc}
\\usepackage{amssymb,amsmath}
\\pagestyle{empty}
\\setlength{\\parindent}{0in}
\\begin{document}
""")
self.setVarDefault("latexPost", "\\end{document}")
self.setVarDefault("revSpacing", 0.1)
self.updateCutoff()
self.setupStandardScheduler()
def modifiedSinceSave(self):
return self.modified > self.lastLoaded
# Queue management
##########################################################################
def setupStandardScheduler(self):
self.getCardId = self._getCardId
self.fillFailedQueue = self._fillFailedQueue
self.fillRevQueue = self._fillRevQueue
self.fillNewQueue = self._fillNewQueue
self.rebuildFailedCount = self._rebuildFailedCount
self.rebuildRevCount = self._rebuildRevCount
self.rebuildNewCount = self._rebuildNewCount
self.requeueCard = self._requeueCard
self.timeForNewCard = self._timeForNewCard
self.updateNewCountToday = self._updateNewCountToday
self.cardQueue = self._cardQueue
self.finishScheduler = None
self.answerCard = self._answerCard
self.cardLimit = self._cardLimit
self.answerPreSave = None
self.spaceCards = self._spaceCards
self.scheduler = "standard"
# restore any cards temporarily suspended by alternate schedulers
try:
self.resetAfterReviewEarly()
except OperationalError:
# will fail if deck hasn't been upgraded yet
pass
def fillQueues(self):
self.fillFailedQueue()
self.fillRevQueue()
self.fillNewQueue()
def rebuildCounts(self):
# global counts
self.cardCount = self.s.scalar("select count(*) from cards")
self.factCount = self.s.scalar("select count(*) from facts")
# due counts
self.rebuildFailedCount()
self.rebuildRevCount()
self.rebuildNewCount()
def _cardLimit(self, active, inactive, sql):
yes = parseTags(self.getVar(active))
no = parseTags(self.getVar(inactive))
if yes:
yids = tagIds(self.s, yes).values()
nids = tagIds(self.s, no).values()
return sql.replace(
"where",
"where +c.id in (select cardId from cardTags where "
"tagId in %s) and +c.id not in (select cardId from "
"cardTags where tagId in %s) and" % (
ids2str(yids),
ids2str(nids)))
elif no:
nids = tagIds(self.s, no).values()
return sql.replace(
"where",
"where +c.id not in (select cardId from cardTags where "
"tagId in %s) and" % ids2str(nids))
else:
return sql
def _rebuildFailedCount(self):
# This is a count of all failed cards within the current day cutoff.
# The cards may not be ready for review yet, but can still be
# displayed if failedCardMax is reached.
self.failedSoonCount = self.s.scalar(
self.cardLimit(
"revActive", "revInactive",
"select count(*) from cards c where type = 0 "
"and combinedDue < :lim"), lim=self.failedCutoff)
def _rebuildRevCount(self):
self.revCount = self.s.scalar(
self.cardLimit(
"revActive", "revInactive",
"select count(*) from cards c where type = 1 "
"and combinedDue < :lim"), lim=self.dueCutoff)
def _rebuildNewCount(self):
self.newCount = self.s.scalar(
self.cardLimit(
"newActive", "newInactive",
"select count(*) from cards c where type = 2 "
"and combinedDue < :lim"), lim=self.dueCutoff)
self.updateNewCountToday()
self.spacedCards = []
def _updateNewCountToday(self):
self.newCountToday = max(min(
self.newCount, self.newCardsPerDay -
self.newCardsDoneToday()), 0)
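# Worked example (illustrative): with newCount=100, newCardsPerDay=20 and
# 15 new cards already answered today, newCountToday becomes
# max(min(100, 20 - 15), 0) = 5; once 20 are done it clamps to 0.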
def _fillFailedQueue(self):
if self.failedSoonCount and not self.failedQueue:
self.failedQueue = self.s.all(
self.cardLimit(
"revActive", "revInactive", """
select c.id, factId, combinedDue from cards c where
type = 0 and combinedDue < :lim order by combinedDue
limit %d""" % self.queueLimit), lim=self.failedCutoff)
self.failedQueue.reverse()
def _fillRevQueue(self):
if self.revCount and not self.revQueue:
self.revQueue = self.s.all(
self.cardLimit(
"revActive", "revInactive", """
select c.id, factId from cards c where
type = 1 and combinedDue < :lim order by %s
limit %d""" % (self.revOrder(), self.queueLimit)), lim=self.dueCutoff)
self.revQueue.reverse()
def _fillNewQueue(self):
if self.newCountToday and not self.newQueue and not self.spacedCards:
self.newQueue = self.s.all(
self.cardLimit(
"newActive", "newInactive", """
select c.id, factId from cards c where
type = 2 and combinedDue < :lim order by %s
limit %d""" % (self.newOrder(), self.queueLimit)), lim=self.dueCutoff)
self.newQueue.reverse()
def queueNotEmpty(self, queue, fillFunc, new=False):
while True:
self.removeSpaced(queue, new)
if queue:
return True
fillFunc()
if not queue:
return False
def removeSpaced(self, queue, new=False):
popped = []
delay = None
while queue:
fid = queue[-1][1]
if fid in self.spacedFacts:
# still spaced
id = queue.pop()[0]
# assuming 10 cards/minute, track id if likely to expire
# before queue refilled
if new and self.newSpacing < self.queueLimit * 6:
popped.append(id)
delay = self.spacedFacts[fid]
else:
if popped:
self.spacedCards.append((delay, popped))
return
def revNoSpaced(self):
return self.queueNotEmpty(self.revQueue, self.fillRevQueue)
def newNoSpaced(self):
return self.queueNotEmpty(self.newQueue, self.fillNewQueue, True)
def _requeueCard(self, card, oldSuc):
newType = None
try:
if card.reps == 1:
if self.newFromCache:
# fetched from spaced cache
newType = 2
cards = self.spacedCards.pop(0)[1]
# reschedule the siblings
if len(cards) > 1:
self.spacedCards.append(
(time.time() + self.newSpacing, cards[1:]))
else:
# fetched from normal queue
newType = 1
self.newQueue.pop()
elif oldSuc == 0:
self.failedQueue.pop()
else:
self.revQueue.pop()
except:
raise Exception("""\
requeueCard() failed. Please report this along with the steps you take to
produce the problem.
Counts %d %d %d
Queue %d %d %d
Card info: %d %d %d
New type: %s""" % (self.failedSoonCount, self.revCount, self.newCountToday,
len(self.failedQueue), len(self.revQueue),
len(self.newQueue),
card.reps, card.successive, oldSuc, `newType`))
def revOrder(self):
return ("priority desc, interval desc",
"priority desc, interval",
"priority desc, combinedDue",
"priority desc, factId, ordinal")[self.revCardOrder]
def newOrder(self):
return ("priority desc, due",
"priority desc, due",
"priority desc, due desc")[self.newCardOrder]
def rebuildTypes(self):
"Rebuild the type cache. Only necessary on upgrade."
# set canonical type first
self.s.statement("""
update cards set
relativeDelay = (case
when successive then 1 when reps then 0 else 2 end)
""")
# then current type based on that
self.s.statement("""
update cards set
type = (case
when type >= 0 then relativeDelay else relativeDelay - 3 end)
""")
def _cardQueue(self, card):
return self.cardType(card)
def cardType(self, card):
"Return the type of the current card (what queue it's in)"
if card.successive:
return 1
elif card.reps:
return 0
else:
return 2
def updateCutoff(self):
d = datetime.datetime.utcfromtimestamp(
time.time() - self.utcOffset) + datetime.timedelta(days=1)
d = datetime.datetime(d.year, d.month, d.day)
newday = self.utcOffset - time.timezone
d += datetime.timedelta(seconds=newday)
cutoff = time.mktime(d.timetuple())
# cutoff must not be in the past
while cutoff < time.time():
cutoff += 86400
# cutoff must not be more than 24 hours in the future
cutoff = min(time.time() + 86400, cutoff)
self.failedCutoff = cutoff
if self.getBool("perDay"):
self.dueCutoff = cutoff
else:
self.dueCutoff = time.time()
def reset(self):
# setup global/daily stats
self._globalStats = globalStats(self)
self._dailyStats = dailyStats(self)
# recheck counts
self.rebuildCounts()
# empty queues; will be refilled by getCard()
self.failedQueue = []
self.revQueue = []
self.newQueue = []
self.spacedFacts = {}
# determine new card distribution
if self.newCardSpacing == NEW_CARDS_DISTRIBUTE:
if self.newCountToday:
self.newCardModulus = (
(self.newCountToday + self.revCount) / self.newCountToday)
# if there are cards to review, ensure modulo >= 2
if self.revCount:
self.newCardModulus = max(2, self.newCardModulus)
else:
self.newCardModulus = 0
else:
self.newCardModulus = 0
# recache css
self.rebuildCSS()
# spacing for delayed cards - not to be confused with newCardSpacing
# above
self.newSpacing = self.getFloat('newSpacing')
self.revSpacing = self.getFloat('revSpacing')
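# Worked example (illustrative): with newCountToday=10 and revCount=40,
# newCardModulus = (10 + 40) / 10 = 5, so NEW_CARDS_DISTRIBUTE shows a
# new card on every 5th repetition of the session.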
def checkDay(self):
# check if the day has rolled over
if genToday(self) != self._dailyStats.day:
self.updateCutoff()
self.reset()
# Review early
##########################################################################
def setupReviewEarlyScheduler(self):
self.fillRevQueue = self._fillRevEarlyQueue
self.rebuildRevCount = self._rebuildRevEarlyCount
self.finishScheduler = self._onReviewEarlyFinished
self.answerPreSave = self._reviewEarlyPreSave
self.scheduler = "reviewEarly"
def _reviewEarlyPreSave(self, card, ease):
if ease > 1:
# prevent it from appearing in next queue fill
card.type += 6
def resetAfterReviewEarly(self):
"Put temporarily suspended cards back into play. Caller must .reset()"
# FIXME: can ignore priorities in the future
ids = self.s.column0(
"select id from cards where type between 6 and 8 or priority = -1")
if ids:
self.updatePriorities(ids)
self.s.statement(
"update cards set type = type - 6 where type between 6 and 8")
self.flushMod()
def _onReviewEarlyFinished(self):
# clean up buried cards
self.resetAfterReviewEarly()
# and go back to regular scheduler
self.setupStandardScheduler()
def _rebuildRevEarlyCount(self):
# in the future it would be nice to skip the first x days of due cards
self.revCount = self.s.scalar(
self.cardLimit(
"revActive", "revInactive", """
select count() from cards c where type = 1 and combinedDue > :now
"""), now=self.dueCutoff)
def _fillRevEarlyQueue(self):
if self.revCount and not self.revQueue:
self.revQueue = self.s.all(
self.cardLimit(
"revActive", "revInactive", """
select id, factId from cards c where type = 1 and combinedDue > :lim
order by combinedDue limit %d""" % self.queueLimit), lim=self.dueCutoff)
self.revQueue.reverse()
# Learn more
##########################################################################
def setupLearnMoreScheduler(self):
self.rebuildNewCount = self._rebuildLearnMoreCount
self.updateNewCountToday = self._updateLearnMoreCountToday
self.finishScheduler = self.setupStandardScheduler
self.scheduler = "learnMore"
def _rebuildLearnMoreCount(self):
self.newCount = self.s.scalar(
self.cardLimit(
"newActive", "newInactive",
"select count(*) from cards c where type = 2 "
"and combinedDue < :lim"), lim=self.dueCutoff)
self.spacedCards = []
def _updateLearnMoreCountToday(self):
self.newCountToday = self.newCount
# Cramming
##########################################################################
def setupCramScheduler(self, active, order):
self.getCardId = self._getCramCardId
self.activeCramTags = active
self.cramOrder = order
self.rebuildNewCount = self._rebuildCramNewCount
self.rebuildRevCount = self._rebuildCramCount
self.rebuildFailedCount = self._rebuildFailedCramCount
self.fillRevQueue = self._fillCramQueue
self.fillFailedQueue = self._fillFailedCramQueue
self.finishScheduler = self.setupStandardScheduler
self.failedCramQueue = []
self.requeueCard = self._requeueCramCard
self.cardQueue = self._cramCardQueue
self.answerCard = self._answerCramCard
self.spaceCards = self._spaceCramCards
# reuse review early's code
self.answerPreSave = self._cramPreSave
self.cardLimit = self._cramCardLimit
self.scheduler = "cram"
def _cramPreSave(self, card, ease):
# prevent it from appearing in next queue fill
card.lastInterval = self.cramLastInterval
card.type += 6
def _spaceCramCards(self, card):
self.spacedFacts[card.factId] = time.time() + self.newSpacing
def _answerCramCard(self, card, ease):
self.cramLastInterval = card.lastInterval
self._answerCard(card, ease)
if ease == 1:
self.failedCramQueue.insert(0, [card.id, card.factId])
def _getCramCardId(self, check=True):
self.checkDay()
self.fillQueues()
if self.failedCardMax and self.failedSoonCount >= self.failedCardMax:
return self.failedQueue[-1][0]
# card due for review?
if self.revNoSpaced():
return self.revQueue[-1][0]
if self.failedQueue:
return self.failedQueue[-1][0]
if check:
# collapse spaced cards before reverting back to old scheduler
self.reset()
return self.getCardId(False)
# if we're in a custom scheduler, we may need to switch back
if self.finishScheduler:
self.finishScheduler()
self.reset()
return self.getCardId()
def _cramCardQueue(self, card):
if self.revQueue and self.revQueue[-1][0] == card.id:
return 1
else:
return 0
def _requeueCramCard(self, card, oldSuc):
if self.cardQueue(card) == 1:
self.revQueue.pop()
else:
self.failedCramQueue.pop()
def _rebuildCramNewCount(self):
self.newCount = 0
self.newCountToday = 0
def _cramCardLimit(self, active, inactive, sql):
# inactive is (currently) ignored
if isinstance(active, list):
return sql.replace(
"where", "where +c.id in " + ids2str(active) + " and")
else:
yes = parseTags(active)
if yes:
yids = tagIds(self.s, yes).values()
return sql.replace(
"where ",
"where +c.id in (select cardId from cardTags where "
"tagId in %s) and " % ids2str(yids))
else:
return sql
def _fillCramQueue(self):
if self.revCount and not self.revQueue:
self.revQueue = self.s.all(self.cardLimit(
self.activeCramTags, "", """
select id, factId from cards c
where type between 0 and 2
order by %s
limit %s""" % (self.cramOrder, self.queueLimit)))
self.revQueue.reverse()
def _rebuildCramCount(self):
self.revCount = self.s.scalar(self.cardLimit(
self.activeCramTags, "",
"select count(*) from cards c where type between 0 and 2"))
def _rebuildFailedCramCount(self):
self.failedSoonCount = len(self.failedCramQueue)
def _fillFailedCramQueue(self):
self.failedQueue = self.failedCramQueue
# Getting the next card
##########################################################################
def getCard(self, orm=True):
"Return the next card object, or None."
id = self.getCardId()
if id:
return self.cardFromId(id, orm)
else:
self.stopSession()
def _getCardId(self, check=True):
"Return the next due card id, or None."
self.checkDay()
self.fillQueues()
self.updateNewCountToday()
if self.failedQueue:
# failed card due?
if self.delay0:
if self.failedQueue[-1][2] + self.delay0 < time.time():
return self.failedQueue[-1][0]
# failed card queue too big?
if (self.failedCardMax and
self.failedSoonCount >= self.failedCardMax):
return self.failedQueue[-1][0]
# distribute new cards?
if self.newNoSpaced() and self.timeForNewCard():
return self.getNewCard()
# card due for review?
if self.revNoSpaced():
return self.revQueue[-1][0]
# new cards left?
if self.newCountToday:
id = self.getNewCard()
if id:
return id
if check:
# check for expired cards, or new day rollover
self.updateCutoff()
self.reset()
return self.getCardId(check=False)
# display failed cards early/last
if not check and self.showFailedLast() and self.failedQueue:
return self.failedQueue[-1][0]
# if we're in a custom scheduler, we may need to switch back
if self.finishScheduler:
self.finishScheduler()
self.reset()
return self.getCardId()
# Get card: helper functions
##########################################################################
def _timeForNewCard(self):
"True if it's time to display a new card when distributing."
if not self.newCountToday:
return False
if self.newCardSpacing == NEW_CARDS_LAST:
return False
if self.newCardSpacing == NEW_CARDS_FIRST:
return True
# force review if there are very high priority cards
if self.revQueue:
if self.s.scalar(
"select 1 from cards where id = :id and priority = 4",
id = self.revQueue[-1][0]):
return False
if self.newCardModulus:
return self._dailyStats.reps % self.newCardModulus == 0
else:
return False
def getNewCard(self):
src = None
if (self.spacedCards and
self.spacedCards[0][0] < time.time()):
# spaced card has expired
src = 0
elif self.newQueue:
# card left in new queue
src = 1
elif self.spacedCards:
# card left in spaced queue
src = 0
else:
# only cards spaced to another day left
return
if src == 0:
cards = self.spacedCards[0][1]
self.newFromCache = True
return cards[0]
else:
self.newFromCache = False
return self.newQueue[-1][0]
def showFailedLast(self):
return self.collapseTime or not self.delay0
def cardFromId(self, id, orm=False):
"Given a card ID, return a card, and start the card timer."
if orm:
card = self.s.query(oldanki.cards.Card).get(id)
if not card:
return
card.timerStopped = False
else:
card = oldanki.cards.Card()
if not card.fromDB(self.s, id):
return
card.deck = self
card.genFuzz()
card.startTimer()
return card
# Answering a card
##########################################################################
def _answerCard(self, card, ease):
undoName = _("Answer Card")
self.setUndoStart(undoName)
now = time.time()
# old state
oldState = self.cardState(card)
oldQueue = self.cardQueue(card)
lastDelaySecs = time.time() - card.combinedDue
lastDelay = lastDelaySecs / 86400.0
oldSuc = card.successive
# update card details
last = card.interval
card.interval = self.nextInterval(card, ease)
card.lastInterval = last
if card.reps:
# only update if card was not new
card.lastDue = card.due
card.due = self.nextDue(card, ease, oldState)
card.isDue = 0
card.lastFactor = card.factor
card.spaceUntil = 0
if not self.finishScheduler:
# don't update factor in custom schedulers
self.updateFactor(card, ease)
# spacing
self.spaceCards(card)
# adjust counts for current card
if ease == 1:
if card.due < self.failedCutoff:
self.failedSoonCount += 1
if oldQueue == 0:
self.failedSoonCount -= 1
elif oldQueue == 1:
self.revCount -= 1
else:
self.newCount -= 1
# card stats
oldanki.cards.Card.updateStats(card, ease, oldState)
# update type & ensure past cutoff
card.type = self.cardType(card)
card.relativeDelay = card.type
if ease != 1:
card.due = max(card.due, self.dueCutoff+1)
# allow custom schedulers to munge the card
if self.answerPreSave:
self.answerPreSave(card, ease)
# save
card.combinedDue = card.due
card.toDB(self.s)
# global/daily stats
oldanki.stats.updateAllStats(self.s, self._globalStats, self._dailyStats,
card, ease, oldState)
# review history
entry = CardHistoryEntry(card, ease, lastDelay)
entry.writeSQL(self.s)
self.modified = now
# remove from queue
self.requeueCard(card, oldSuc)
# leech handling - we need to do this after the queue, as it may cause
# a reset()
isLeech = self.isLeech(card)
if isLeech:
self.handleLeech(card)
runHook("cardAnswered", card.id, isLeech)
self.setUndoEnd(undoName)
def _spaceCards(self, card):
new = time.time() + self.newSpacing
self.s.statement("""
update cards set
combinedDue = (case
when type = 1 then combinedDue + 86400 * (case
when interval*:rev < 1 then 0
else interval*:rev
end)
when type = 2 then :new
end),
modified = :now, isDue = 0
where id != :id and factId = :factId
and combinedDue < :cut
and type between 1 and 2""",
id=card.id, now=time.time(), factId=card.factId,
cut=self.dueCutoff, new=new, rev=self.revSpacing)
# update local cache of seen facts
self.spacedFacts[card.factId] = new
def isLeech(self, card):
no = card.noCount
fmax = self.getInt('leechFails')
if not fmax:
return
return (
# failed
not card.successive and
# greater than fail threshold
no >= fmax and
# at least threshold/2 reps since last time
(fmax - no) % (max(fmax/2, 1)) == 0)
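# Worked example (illustrative): with leechFails=16, a currently failed
# card first qualifies at noCount=16 and then every 8 further failures
# (24, 32, ...), since (16 - no) % max(16/2, 1) == 0 at those points.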
def handleLeech(self, card):
self.refreshSession()
scard = self.cardFromId(card.id, True)
tags = scard.fact.tags
tags = addTags("Leech", tags)
scard.fact.tags = canonifyTags(tags)
scard.fact.setModified(textChanged=True, deck=self)
self.updateFactTags([scard.fact.id])
self.s.flush()
self.s.expunge(scard)
if self.getBool('suspendLeeches'):
self.suspendCards([card.id])
self.reset()
self.refreshSession()
# Interval management
##########################################################################
def nextInterval(self, card, ease):
"Return the next interval for CARD given EASE."
delay = self._adjustedDelay(card, ease)
return self._nextInterval(card, delay, ease)
def _nextInterval(self, card, delay, ease):
interval = card.interval
factor = card.factor
# if shown early
if delay < 0:
# FIXME: this should recreate lastInterval from interval /
# lastFactor, or we lose delay information when reviewing early
interval = max(card.lastInterval, card.interval + delay)
if interval < self.midIntervalMin:
interval = 0
delay = 0
# if interval is less than mid interval, use presets
if ease == 1:
interval *= self.delay2
if interval < self.hardIntervalMin:
interval = 0
elif interval == 0:
if ease == 2:
interval = random.uniform(self.hardIntervalMin,
self.hardIntervalMax)
elif ease == 3:
interval = random.uniform(self.midIntervalMin,
self.midIntervalMax)
elif ease == 4:
interval = random.uniform(self.easyIntervalMin,
self.easyIntervalMax)
else:
# if not cramming, boost initial 2
if (interval < self.hardIntervalMax and
interval > 0.166):
mid = (self.midIntervalMin + self.midIntervalMax) / 2.0
interval = mid / factor
# multiply last interval by factor
if ease == 2:
interval = (interval + delay/4) * 1.2
elif ease == 3:
interval = (interval + delay/2) * factor
elif ease == 4:
interval = (interval + delay) * factor * self.factorFour
fuzz = random.uniform(0.95, 1.05)
interval *= fuzz
if self.maxScheduleTime:
interval = min(interval, self.maxScheduleTime)
return interval
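# Worked example (illustrative): a review answered "good" (ease 3) with
# interval=10 days, factor=2.5 and no overdue delay gets
# (10 + 0/2) * 2.5 = 25 days, after which the 0.95-1.05 fuzz and the
# maxScheduleTime cap are applied.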
def nextIntervalStr(self, card, ease, short=False):
"Return the next interval for CARD given EASE as a string."
ivl = self.nextInterval(card, ease)
return oldanki.utils.fmtTimeSpan(ivl*86400, short=short)
def nextDue(self, card, ease, oldState):
"Return time when CARD will expire given EASE."
if ease == 1:
# 600 is a magic value which means no bonus, and is used to ease
# upgrades
cram = self.scheduler == "cram"
if (not cram and oldState == "mature"
and self.delay1 and self.delay1 != 600):
# user wants a bonus of 1+ days. put the failed cards at the
# start of the future day, so that failures that day will come
# after the waiting cards
return self.failedCutoff + (self.delay1 - 1)*86400
else:
due = 0
else:
due = card.interval * 86400.0
return due + time.time()
def updateFactor(self, card, ease):
"Update CARD's factor based on EASE."
card.lastFactor = card.factor
if not card.reps:
# card is new, inherit beginning factor
card.factor = self.averageFactor
if card.successive and not self.cardIsBeingLearnt(card):
if ease == 1:
card.factor -= 0.20
elif ease == 2:
card.factor -= 0.15
if ease == 4:
card.factor += 0.10
card.factor = max(1.3, card.factor)
def _adjustedDelay(self, card, ease):
"Return an adjusted delay value for CARD based on EASE."
if self.cardIsNew(card):
return 0
if card.reps and not card.successive:
return 0
if card.combinedDue <= self.dueCutoff:
return (self.dueCutoff - card.due) / 86400.0
else:
return (self.dueCutoff - card.combinedDue) / 86400.0
def resetCards(self, ids):
"Reset progress on cards in IDS."
self.s.statement("""
update cards set interval = :new, lastInterval = 0, lastDue = 0,
factor = 2.5, reps = 0, successive = 0, averageTime = 0, reviewTime = 0,
youngEase0 = 0, youngEase1 = 0, youngEase2 = 0, youngEase3 = 0,
youngEase4 = 0, matureEase0 = 0, matureEase1 = 0, matureEase2 = 0,
matureEase3 = 0, matureEase4 = 0, yesCount = 0, noCount = 0,
spaceUntil = 0, type = 2, relativeDelay = 2,
combinedDue = created, modified = :now, due = created, isDue = 0
where id in %s""" % ids2str(ids), now=time.time(), new=0)
if self.newCardOrder == NEW_CARDS_RANDOM:
# we need to re-randomize now
self.randomizeNewCards(ids)
self.flushMod()
self.refreshSession()
def randomizeNewCards(self, cardIds=None):
"Randomize 'due' on all new cards."
now = time.time()
query = "select distinct factId from cards where reps = 0"
if cardIds:
query += " and id in %s" % ids2str(cardIds)
fids = self.s.column0(query)
data = [{'fid': fid,
'rand': random.uniform(0, now),
'now': now} for fid in fids]
self.s.statements("""
update cards
set due = :rand + ordinal,
combinedDue = :rand + ordinal,
modified = :now
where factId = :fid
and relativeDelay = 2""", data)
def orderNewCards(self):
"Set 'due' to card creation time."
self.s.statement("""
update cards set
due = created,
combinedDue = created,
modified = :now
where relativeDelay = 2""", now=time.time())
def rescheduleCards(self, ids, min, max):
"Reset cards and schedule with new interval in days (min, max)."
self.resetCards(ids)
vals = []
for id in ids:
r = random.uniform(min*86400, max*86400)
vals.append({
'id': id,
'due': r + time.time(),
'int': r / 86400.0,
't': time.time(),
})
self.s.statements("""
update cards set
interval = :int,
due = :due,
combinedDue = :due,
reps = 1,
successive = 1,
yesCount = 1,
firstAnswered = :t,
type = 1,
relativeDelay = 1,
isDue = 0
where id = :id""", vals)
self.flushMod()
# Times
##########################################################################
def nextDueMsg(self):
next = self.earliestTime()
if next:
# all new cards except suspended
newCount = self.newCardsDueBy(self.dueCutoff + 86400)
newCardsTomorrow = min(newCount, self.newCardsPerDay)
cards = self.cardsDueBy(self.dueCutoff + 86400)
msg = _('''\
<style>b { color: #00f; }</style>
At this time tomorrow:<br>
%(wait)s<br>
%(new)s''') % {
'new': ngettext("There will be <b>%d new</b> card.",
"There will be <b>%d new</b> cards.",
newCardsTomorrow) % newCardsTomorrow,
'wait': ngettext("There will be <b>%s review</b>.",
"There will be <b>%s reviews</b>.", cards) % cards,
}
if next > (self.dueCutoff+86400) and not newCardsTomorrow:
msg = (_("The next review is in <b>%s</b>.") %
self.earliestTimeStr())
else:
msg = _("No cards are due.")
return msg
def earliestTime(self):
"""Return the time of the earliest card.
This may be in the past if the deck is not finished.
If the deck has no (enabled) cards, return None.
Ignore new cards."""
earliestRev = self.s.scalar(self.cardLimit("revActive", "revInactive", """
select combinedDue from cards c where type = 1
order by combinedDue
limit 1"""))
earliestFail = self.s.scalar(self.cardLimit("revActive", "revInactive", """
select combinedDue+%d from cards c where type = 0
order by combinedDue
limit 1""" % self.delay0))
if earliestRev and earliestFail:
return min(earliestRev, earliestFail)
elif earliestRev:
return earliestRev
else:
return earliestFail
def earliestTimeStr(self, next=None):
"""Return the relative time to the earliest card as a string."""
if next is None:
next = self.earliestTime()
if not next:
return _("unknown")
diff = next - time.time()
return oldanki.utils.fmtTimeSpan(diff)
def cardsDueBy(self, time):
"Number of cards due at TIME. Ignore new cards"
return self.s.scalar(
self.cardLimit(
"revActive", "revInactive",
"select count(*) from cards c where type between 0 and 1 "
"and combinedDue < :lim"), lim=time)
def newCardsDueBy(self, time):
"Number of new cards due at TIME."
return self.s.scalar(
self.cardLimit(
"newActive", "newInactive",
"select count(*) from cards c where type = 2 "
"and combinedDue < :lim"), lim=time)
def deckFinishedMsg(self):
spaceSusp = ""
c = self.spacedCardCount()
if c:
spaceSusp += ngettext(
'There is <b>%d delayed</b> card.',
'There are <b>%d delayed</b> cards.', c) % c
c2 = self.hiddenCards()
if c2:
if spaceSusp:
spaceSusp += "<br>"
spaceSusp += _(
"Some cards are inactive or suspended.")
if spaceSusp:
spaceSusp = "<br><br>" + spaceSusp
return _('''\
<div style="white-space: normal;">
<h1>Congratulations!</h1>You have finished for now.<br><br>
%(next)s
%(spaceSusp)s
</div>''') % {
"next": self.nextDueMsg(),
"spaceSusp": spaceSusp,
}
# Priorities
##########################################################################
def updateAllPriorities(self, partial=False, dirty=True):
"Update all card priorities if changed. Caller must .reset()"
new = self.updateTagPriorities()
if not partial:
new = self.s.all("select id, priority as pri from tags")
cids = self.s.column0(
"select distinct cardId from cardTags where tagId in %s" %
ids2str([x['id'] for x in new]))
self.updatePriorities(cids, dirty=dirty)
def updateTagPriorities(self):
"Update priority setting on tags table."
# make sure all priority tags exist
for s in (self.lowPriority, self.medPriority,
self.highPriority):
tagIds(self.s, parseTags(s))
tags = self.s.all("select tag, id, priority from tags")
tags = [(x[0].lower(), x[1], x[2]) for x in tags]
up = {}
for (type, pri) in ((self.lowPriority, 1),
(self.medPriority, 3),
(self.highPriority, 4)):
for tag in parseTags(type.lower()):
up[tag] = pri
new = []
for (tag, id, pri) in tags:
if tag in up and up[tag] != pri:
new.append({'id': id, 'pri': up[tag]})
elif tag not in up and pri != 2:
new.append({'id': id, 'pri': 2})
self.s.statements(
"update tags set priority = :pri where id = :id",
new)
return new
def updatePriorities(self, cardIds, suspend=[], dirty=True):
"Update priorities for cardIds. Caller must .reset()."
# any tags to suspend
if suspend:
ids = tagIds(self.s, suspend)
self.s.statement(
"update tags set priority = 0 where id in %s" %
ids2str(ids.values()))
if len(cardIds) > 1000:
limit = ""
else:
limit = "and cardTags.cardId in %s" % ids2str(cardIds)
cards = self.s.all("""
select cardTags.cardId,
case
when max(tags.priority) > 2 then max(tags.priority)
when min(tags.priority) = 1 then 1
else 2 end
from cardTags, tags
where cardTags.tagId = tags.id
%s
group by cardTags.cardId""" % limit)
if dirty:
extra = ", modified = :m "
else:
extra = ""
for pri in range(5):
cs = [c[0] for c in cards if c[1] == pri]
if cs:
# catch review early & buried but not suspended
self.s.statement((
"update cards set priority = :pri %s where id in %s "
"and priority != :pri and priority >= -2") % (
extra, ids2str(cs)), pri=pri, m=time.time())
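# Worked example (illustrative): the CASE above resolves a card's
# priority from its tags: any tag above normal (3 or 4) wins via max();
# otherwise a single low tag (priority 1) pulls the card down to 1;
# otherwise the card stays at the normal priority 2.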
def updatePriority(self, card):
"Update priority on a single card."
self.s.flush()
self.updatePriorities([card.id])
# Suspending
##########################################################################
# when older clients are upgraded, we can remove the code which touches
# priorities & isDue
def suspendCards(self, ids):
"Suspend cards. Caller must .reset()"
self.startProgress()
self.s.statement("""
update cards
set type = relativeDelay - 3,
priority = -3, modified = :t, isDue=0
where type >= 0 and id in %s""" % ids2str(ids), t=time.time())
self.flushMod()
self.finishProgress()
def unsuspendCards(self, ids):
"Unsuspend cards. Caller must .reset()"
self.startProgress()
self.s.statement("""
update cards set type = relativeDelay, priority=0, modified=:t
where type < 0 and id in %s""" %
ids2str(ids), t=time.time())
self.updatePriorities(ids)
self.flushMod()
self.finishProgress()
def buryFact(self, fact):
"Bury all cards for fact until next session. Caller must .reset()"
for card in fact.cards:
if card.type in (0,1,2):
card.priority = -2
card.type += 3
card.isDue = 0
self.flushMod()
# Counts
##########################################################################
def hiddenCards(self):
"Assumes queue finished. True if some due cards have not been shown."
return self.s.scalar("""
select 1 from cards where combinedDue < :now
and type between 0 and 1 limit 1""", now=self.dueCutoff)
def newCardsDoneToday(self):
return (self._dailyStats.newEase0 +
self._dailyStats.newEase1 +
self._dailyStats.newEase2 +
self._dailyStats.newEase3 +
self._dailyStats.newEase4)
def spacedCardCount(self):
"Number of spaced cards."
return self.s.scalar("""
select count(cards.id) from cards where
combinedDue > :now and due < :now""", now=time.time())
def isEmpty(self):
return not self.cardCount
def matureCardCount(self):
return self.s.scalar(
"select count(id) from cards where interval >= :t ",
t=MATURE_THRESHOLD)
def youngCardCount(self):
return self.s.scalar(
"select count(id) from cards where interval < :t "
"and reps != 0", t=MATURE_THRESHOLD)
def newCountAll(self):
"All new cards, including spaced."
return self.s.scalar(
"select count(id) from cards where relativeDelay = 2")
def seenCardCount(self):
return self.s.scalar(
"select count(id) from cards where relativeDelay between 0 and 1")
# Card predicates
##########################################################################
def cardState(self, card):
if self.cardIsNew(card):
return "new"
elif card.interval > MATURE_THRESHOLD:
return "mature"
return "young"
def cardIsNew(self, card):
"True if a card has never been seen before."
return card.reps == 0
def cardIsBeingLearnt(self, card):
"True if card should use present intervals."
return card.lastInterval < 7
def cardIsYoung(self, card):
"True if card is not new and not mature."
return (not self.cardIsNew(card) and
not self.cardIsMature(card))
def cardIsMature(self, card):
return card.interval >= MATURE_THRESHOLD
# Stats
##########################################################################
def getStats(self, short=False):
"Return some commonly needed stats."
stats = oldanki.stats.getStats(self.s, self._globalStats, self._dailyStats)
# add scheduling related stats
stats['new'] = self.newCountToday
stats['failed'] = self.failedSoonCount
stats['rev'] = self.revCount
if stats['dAverageTime']:
stats['timeLeft'] = oldanki.utils.fmtTimeSpan(
self.getETA(stats), pad=0, point=1, short=short)
else:
stats['timeLeft'] = _("Unknown")
return stats
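    # The ETA below scales (rev + new) by the global young-card failure
    # percentage, then adds failed cards at a per-card time factor that
    # grows with the size of the failed backlog. For example (made-up
    # numbers): rev=50, new=10, gYoungNo%=10, dAverageTime=12s gives
    # 60 * 1.1 * 12 = 792s; failed=30 gives a factor of
    # 1.5 + 0.07*(30-20) = 2.2, adding 30 * 12 * 2.2 = 792s, so roughly
    # 26 minutes in total.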
def getETA(self, stats):
# rev + new cards first, account for failures
count = stats['rev'] + stats['new']
count *= 1 + stats['gYoungNo%'] / 100.0
left = count * stats['dAverageTime']
# failed - higher time per card for higher amount of cards
failedBaseMulti = 1.5
failedMod = 0.07
failedBaseCount = 20
factor = (failedBaseMulti +
(failedMod * (stats['failed'] - failedBaseCount)))
left += stats['failed'] * stats['dAverageTime'] * factor
return left
# Facts
##########################################################################
def newFact(self, model=None):
"Return a new fact with the current model."
if model is None:
model = self.currentModel
return oldanki.facts.Fact(model)
def addFact(self, fact, reset=True):
"Add a fact to the deck. Return list of new cards."
if not fact.model:
fact.model = self.currentModel
# validate
fact.assertValid()
fact.assertUnique(self.s)
# check we have card models available
cms = self.availableCardModels(fact)
if not cms:
return None
# proceed
cards = []
self.s.save(fact)
# update field cache
self.factCount += 1
self.flushMod()
isRandom = self.newCardOrder == NEW_CARDS_RANDOM
if isRandom:
due = random.uniform(0, time.time())
t = time.time()
for cardModel in cms:
created = fact.created + 0.00001*cardModel.ordinal
card = oldanki.cards.Card(fact, cardModel, created)
if isRandom:
card.due = due
card.combinedDue = due
self.flushMod()
cards.append(card)
# update card q/a
fact.setModified(True, self)
self.updateFactTags([fact.id])
# this will call reset() which will update counts
self.updatePriorities([c.id for c in cards])
# keep track of last used tags for convenience
self.lastTags = fact.tags
self.flushMod()
if reset:
self.reset()
return fact
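    # A card model counts as "empty" for a fact when rendering its
    # question/answer templates with the fact's real field values
    # produces the same output as rendering them with every field
    # blanked out, i.e. none of the referenced fields hold anything.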
def availableCardModels(self, fact, checkActive=True):
"List of active card models that aren't empty for FACT."
models = []
for cardModel in fact.model.cardModels:
if cardModel.active or not checkActive:
ok = True
for (type, format) in [("q", cardModel.qformat),
("a", cardModel.aformat)]:
# compat
format = re.sub("%\((.+?)\)s", "{{\\1}}", format)
empty = {}
local = {}; local.update(fact)
local['tags'] = u""
local['Tags'] = u""
local['cardModel'] = u""
local['modelName'] = u""
for k in local.keys():
empty[k] = u""
empty["text:"+k] = u""
local["text:"+k] = local[k]
empty['tags'] = ""
local['tags'] = fact.tags
try:
if (render(format, local) ==
render(format, empty)):
ok = False
break
except (KeyError, TypeError, ValueError):
ok = False
break
                if ok or (type == "a" and cardModel.allowEmptyAnswer):
models.append(cardModel)
return models
def addCards(self, fact, cardModelIds):
"Caller must flush first, flushMod after, rebuild priorities."
ids = []
for cardModel in self.availableCardModels(fact, False):
if cardModel.id not in cardModelIds:
continue
if self.s.scalar("""
select count(id) from cards
where factId = :fid and cardModelId = :cmid""",
fid=fact.id, cmid=cardModel.id) == 0:
# enough for 10 card models assuming 0.00001 timer precision
card = oldanki.cards.Card(
fact, cardModel,
fact.created+0.0001*cardModel.ordinal)
self.updateCardTags([card.id])
self.updatePriority(card)
self.cardCount += 1
self.newCount += 1
ids.append(card.id)
if ids:
fact.setModified(textChanged=True, deck=self)
self.setModified()
return ids
def factIsInvalid(self, fact):
"True if existing fact is invalid. Returns the error."
try:
fact.assertValid()
fact.assertUnique(self.s)
except FactInvalidError, e:
return e
def factUseCount(self, factId):
"Return number of cards referencing a given fact id."
return self.s.scalar("select count(id) from cards where factId = :id",
id=factId)
def deleteFact(self, factId):
"Delete a fact. Removes any associated cards. Don't flush."
self.s.flush()
# remove any remaining cards
self.s.statement("insert into cardsDeleted select id, :time "
"from cards where factId = :factId",
time=time.time(), factId=factId)
self.s.statement(
"delete from cards where factId = :id", id=factId)
# and then the fact
self.deleteFacts([factId])
self.setModified()
def deleteFacts(self, ids):
"Bulk delete facts by ID; don't touch cards. Caller must .reset()."
if not ids:
return
self.s.flush()
now = time.time()
strids = ids2str(ids)
self.s.statement("delete from facts where id in %s" % strids)
self.s.statement("delete from fields where factId in %s" % strids)
data = [{'id': id, 'time': now} for id in ids]
self.s.statements("insert into factsDeleted values (:id, :time)", data)
self.setModified()
def deleteDanglingFacts(self):
"Delete any facts without cards. Return deleted ids."
ids = self.s.column0("""
select facts.id from facts
where facts.id not in (select distinct factId from cards)""")
self.deleteFacts(ids)
return ids
def previewFact(self, oldFact, cms=None):
"Duplicate fact and generate cards for preview. Don't add to deck."
# check we have card models available
if cms is None:
cms = self.availableCardModels(oldFact, checkActive=True)
if not cms:
return []
fact = self.cloneFact(oldFact)
# proceed
cards = []
for cardModel in cms:
card = oldanki.cards.Card(fact, cardModel)
cards.append(card)
fact.setModified(textChanged=True, deck=self, media=False)
return cards
def cloneFact(self, oldFact):
"Copy fact into new session."
model = self.s.query(Model).get(oldFact.model.id)
fact = self.newFact(model)
for field in fact.fields:
fact[field.name] = oldFact[field.name]
fact.tags = oldFact.tags
return fact
# Cards
##########################################################################
def deleteCard(self, id):
"Delete a card given its id. Delete any unused facts. Don't flush."
self.deleteCards([id])
def deleteCards(self, ids):
"Bulk delete cards by ID. Caller must .reset()"
if not ids:
return
self.s.flush()
now = time.time()
strids = ids2str(ids)
self.startProgress()
# grab fact ids
factIds = self.s.column0("select factId from cards where id in %s"
% strids)
# drop from cards
self.s.statement("delete from cards where id in %s" % strids)
# note deleted
data = [{'id': id, 'time': now} for id in ids]
self.s.statements("insert into cardsDeleted values (:id, :time)", data)
# gather affected tags
tags = self.s.column0(
"select tagId from cardTags where cardId in %s" %
strids)
# delete
self.s.statement("delete from cardTags where cardId in %s" % strids)
# find out if they're used by anything else
unused = []
for tag in tags:
if not self.s.scalar(
"select 1 from cardTags where tagId = :d limit 1", d=tag):
unused.append(tag)
# delete unused
self.s.statement("delete from tags where id in %s and priority = 2" %
ids2str(unused))
# remove any dangling facts
self.deleteDanglingFacts()
self.refreshSession()
self.flushMod()
self.finishProgress()
# Models
##########################################################################
def addModel(self, model):
if model not in self.models:
self.models.append(model)
self.currentModel = model
self.flushMod()
def deleteModel(self, model):
"Delete MODEL, and all its cards/facts. Caller must .reset()."
if self.s.scalar("select count(id) from models where id=:id",
id=model.id):
# delete facts/cards
self.deleteCards(self.s.column0("""
select cards.id from cards, facts where
facts.modelId = :id and
facts.id = cards.factId""", id=model.id))
# then the model
self.models.remove(model)
self.s.delete(model)
self.s.flush()
if self.currentModel == model:
self.currentModel = self.models[0]
self.s.statement("insert into modelsDeleted values (:id, :time)",
id=model.id, time=time.time())
self.flushMod()
self.refreshSession()
self.setModified()
def modelUseCount(self, model):
"Return number of facts using model."
return self.s.scalar("select count(facts.modelId) from facts "
"where facts.modelId = :id",
id=model.id)
def deleteEmptyModels(self):
for model in self.models:
if not self.modelUseCount(model):
self.deleteModel(model)
def rebuildCSS(self):
# css for all fields
def _genCSS(prefix, row):
(id, fam, siz, col, align, rtl, pre) = row
t = ""
if fam: t += 'font-family:"%s";' % toPlatformFont(fam)
if siz: t += 'font-size:%dpx;' % siz
if col: t += 'color:%s;' % col
if rtl == "rtl":
t += "direction:rtl;unicode-bidi:embed;"
if pre:
t += "white-space:pre-wrap;"
if align != -1:
if align == 0: align = "center"
elif align == 1: align = "left"
else: align = "right"
t += 'text-align:%s;' % align
if t:
t = "%s%s {%s}\n" % (prefix, hexifyID(id), t)
return t
css = "".join([_genCSS(".fm", row) for row in self.s.all("""
select id, quizFontFamily, quizFontSize, quizFontColour, -1,
features, editFontFamily from fieldModels""")])
cardRows = self.s.all("""
select id, null, null, null, questionAlign, 0, 0 from cardModels""")
css += "".join([_genCSS("#cmq", row) for row in cardRows])
css += "".join([_genCSS("#cma", row) for row in cardRows])
css += "".join([".cmb%s {background:%s;}\n" %
(hexifyID(row[0]), row[1]) for row in self.s.all("""
select id, lastFontColour from cardModels""")])
self.css = css
self.setVar("cssCache", css, mod=False)
self.addHexCache()
return css
def addHexCache(self):
ids = self.s.column0("""
select id from fieldModels union
select id from cardModels union
select id from models""")
cache = {}
for id in ids:
cache[id] = hexifyID(id)
self.setVar("hexCache", simplejson.dumps(cache), mod=False)
def copyModel(self, oldModel):
"Add a new model to DB based on MODEL."
m = Model(_("%s copy") % oldModel.name)
for f in oldModel.fieldModels:
f = f.copy()
m.addFieldModel(f)
for c in oldModel.cardModels:
c = c.copy()
m.addCardModel(c)
for attr in ("tags", "spacing", "initialSpacing"):
setattr(m, attr, getattr(oldModel, attr))
self.addModel(m)
return m
def changeModel(self, factIds, newModel, fieldMap, cardMap):
"Caller must .reset()"
self.s.flush()
fids = ids2str(factIds)
changed = False
# field remapping
if fieldMap:
changed = True
self.startProgress(len(fieldMap)+2)
seen = {}
for (old, new) in fieldMap.items():
self.updateProgress(_("Changing fields..."))
seen[new] = 1
if new:
# can rename
self.s.statement("""
update fields set
fieldModelId = :new,
ordinal = :ord
where fieldModelId = :old
and factId in %s""" % fids, new=new.id, ord=new.ordinal, old=old.id)
else:
# no longer used
self.s.statement("""
delete from fields where factId in %s
and fieldModelId = :id""" % fids, id=old.id)
# new
for field in newModel.fieldModels:
self.updateProgress()
if field not in seen:
d = [{'id': genID(),
'fid': f,
'fmid': field.id,
'ord': field.ordinal}
for f in factIds]
self.s.statements('''
insert into fields
(id, factId, fieldModelId, ordinal, value)
values
(:id, :fid, :fmid, :ord, "")''', d)
# fact modtime
self.updateProgress()
self.s.statement("""
update facts set
modified = :t,
modelId = :id
where id in %s""" % fids, t=time.time(), id=newModel.id)
self.finishProgress()
# template remapping
self.startProgress(len(cardMap)+4)
toChange = []
self.updateProgress(_("Changing cards..."))
for (old, new) in cardMap.items():
if not new:
# delete
self.s.statement("""
delete from cards
where cardModelId = :cid and
factId in %s""" % fids, cid=old.id)
elif old != new:
# gather ids so we can rename x->y and y->x
ids = self.s.column0("""
select id from cards where
cardModelId = :id and factId in %s""" % fids, id=old.id)
toChange.append((new, ids))
for (new, ids) in toChange:
self.updateProgress()
self.s.statement("""
update cards set
cardModelId = :new,
ordinal = :ord
where id in %s""" % ids2str(ids), new=new.id, ord=new.ordinal)
self.updateProgress()
self.updateCardQACacheFromIds(factIds, type="facts")
self.flushMod()
self.updateProgress()
cardIds = self.s.column0(
"select id from cards where factId in %s" %
ids2str(factIds))
self.updateCardTags(cardIds)
self.updateProgress()
self.updatePriorities(cardIds)
self.updateProgress()
self.refreshSession()
self.finishProgress()
# Fields
##########################################################################
def allFields(self):
"Return a list of all possible fields across all models."
return self.s.column0("select distinct name from fieldmodels")
def deleteFieldModel(self, model, field):
self.startProgress()
self.s.statement("delete from fields where fieldModelId = :id",
id=field.id)
self.s.statement("update facts set modified = :t where modelId = :id",
id=model.id, t=time.time())
model.fieldModels.remove(field)
# update q/a formats
for cm in model.cardModels:
types = ("%%(%s)s" % field.name,
"%%(text:%s)s" % field.name,
# new style
"<<%s>>" % field.name,
"<<text:%s>>" % field.name)
for t in types:
for fmt in ('qformat', 'aformat'):
setattr(cm, fmt, getattr(cm, fmt).replace(t, ""))
self.updateCardsFromModel(model)
model.setModified()
self.flushMod()
self.finishProgress()
def addFieldModel(self, model, field):
"Add FIELD to MODEL and update cards."
model.addFieldModel(field)
# commit field to disk
self.s.flush()
self.s.statement("""
insert into fields (factId, fieldModelId, ordinal, value)
select facts.id, :fmid, :ordinal, "" from facts
where facts.modelId = :mid""", fmid=field.id, mid=model.id, ordinal=field.ordinal)
# ensure facts are marked updated
self.s.statement("""
update facts set modified = :t where modelId = :mid"""
, t=time.time(), mid=model.id)
model.setModified()
self.flushMod()
def renameFieldModel(self, model, field, newName):
"Change FIELD's name in MODEL and update FIELD in all facts."
for cm in model.cardModels:
types = ("%%(%s)s",
"%%(text:%s)s",
# new styles
"{{%s}}",
"{{text:%s}}",
"{{#%s}}",
"{{^%s}}",
"{{/%s}}")
for t in types:
for fmt in ('qformat', 'aformat'):
setattr(cm, fmt, getattr(cm, fmt).replace(t%field.name,
t%newName))
field.name = newName
model.setModified()
self.flushMod()
def fieldModelUseCount(self, fieldModel):
"Return the number of cards using fieldModel."
return self.s.scalar("""
select count(id) from fields where
fieldModelId = :id and value != ""
""", id=fieldModel.id)
def rebuildFieldOrdinals(self, modelId, ids):
"""Update field ordinal for all fields given field model IDS.
Caller must update model modtime."""
self.s.flush()
strids = ids2str(ids)
self.s.statement("""
update fields
set ordinal = (select ordinal from fieldModels where id = fieldModelId)
where fields.fieldModelId in %s""" % strids)
# dirty associated facts
self.s.statement("""
update facts
set modified = strftime("%s", "now")
where modelId = :id""", id=modelId)
self.flushMod()
# Card models
##########################################################################
def cardModelUseCount(self, cardModel):
"Return the number of cards using cardModel."
return self.s.scalar("""
select count(id) from cards where
cardModelId = :id""", id=cardModel.id)
def deleteCardModel(self, model, cardModel):
"Delete all cards that use CARDMODEL from the deck."
cards = self.s.column0("select id from cards where cardModelId = :id",
id=cardModel.id)
self.deleteCards(cards)
model.cardModels.remove(cardModel)
model.setModified()
self.flushMod()
def updateCardsFromModel(self, model, dirty=True):
"Update all card question/answer when model changes."
ids = self.s.all("""
select cards.id, cards.cardModelId, cards.factId, facts.modelId from
cards, facts where
cards.factId = facts.id and
facts.modelId = :id""", id=model.id)
if not ids:
return
self.updateCardQACache(ids, dirty)
def updateCardsFromFactIds(self, ids, dirty=True):
"Update all card question/answer when model changes."
ids = self.s.all("""
select cards.id, cards.cardModelId, cards.factId, facts.modelId from
cards, facts where
cards.factId = facts.id and
facts.id in %s""" % ids2str(ids))
if not ids:
return
self.updateCardQACache(ids, dirty)
def updateCardQACacheFromIds(self, ids, type="cards"):
"Given a list of card or fact ids, update q/a cache."
if type == "facts":
# convert to card ids
ids = self.s.column0(
"select id from cards where factId in %s" % ids2str(ids))
rows = self.s.all("""
select c.id, c.cardModelId, f.id, f.modelId
from cards as c, facts as f
where c.factId = f.id
and c.id in %s""" % ids2str(ids))
self.updateCardQACache(rows)
def updateCardQACache(self, ids, dirty=True):
"Given a list of (cardId, cardModelId, factId, modId), update q/a cache."
# we don't need the q/a cache for upgrading
return
if dirty:
mod = ", modified = %f" % time.time()
else:
mod = ""
# tags
cids = ids2str([x[0] for x in ids])
tags = dict([(x[0], x[1:]) for x in
self.splitTagsList(
where="and cards.id in %s" % cids)])
facts = {}
# fields
for k, g in groupby(self.s.all("""
select fields.factId, fieldModels.name, fieldModels.id, fields.value
from fields, fieldModels where fields.factId in %s and
fields.fieldModelId = fieldModels.id
order by fields.factId""" % ids2str([x[2] for x in ids])),
itemgetter(0)):
facts[k] = dict([(r[1], (r[2], r[3])) for r in g])
# card models
cms = {}
for c in self.s.query(CardModel).all():
cms[c.id] = c
pend = [formatQA(cid, mid, facts[fid], tags[cid], cms[cmid], self)
for (cid, cmid, fid, mid) in ids]
if pend:
# find existing media references
files = {}
for txt in self.s.column0(
"select question || answer from cards where id in %s" %
cids):
for f in mediaFiles(txt):
if f in files:
files[f] -= 1
else:
files[f] = -1
# determine ref count delta
for p in pend:
for type in ("question", "answer"):
txt = p[type]
for f in mediaFiles(txt):
if f in files:
files[f] += 1
else:
files[f] = 1
# update references - this could be more efficient
for (f, cnt) in files.items():
if not cnt:
continue
updateMediaCount(self, f, cnt)
# update q/a
self.s.execute("""
update cards set
question = :question, answer = :answer
%s
where id = :id""" % mod, pend)
# update fields cache
self.updateFieldCache(facts.keys())
if dirty:
self.flushMod()
def updateFieldCache(self, fids):
"Add stripped HTML cache for sorting/searching."
try:
all = self.s.all(
("select factId, group_concat(value, ' ') from fields "
"where factId in %s group by factId") % ids2str(fids))
except:
# older sqlite doesn't support group_concat. this code taken from
# the wm port
            all = []
            for factId in fids:
                values = self.s.all(
                    "select value from fields where value is not NULL "
                    "and factId = %(factId)i" % {"factId": factId})
                value_list = [row[0] for row in values]
                all.append([factId, ' '.join(value_list)])
r = []
from oldanki.utils import stripHTMLMedia
for a in all:
r.append({'id':a[0], 'v':stripHTMLMedia(a[1])})
self.s.statements(
"update facts set spaceUntil=:v where id=:id", r)
def rebuildCardOrdinals(self, ids):
"Update all card models in IDS. Caller must update model modtime."
self.s.flush()
strids = ids2str(ids)
self.s.statement("""
update cards set
ordinal = (select ordinal from cardModels where id = cardModelId),
modified = :now
where cardModelId in %s""" % strids, now=time.time())
self.flushMod()
def changeCardModel(self, cardIds, newCardModelId):
self.s.statement("""
update cards set cardModelId = :newId
where id in %s""" % ids2str(cardIds), newId=newCardModelId)
self.updateCardQACacheFromIds(cardIds)
self.flushMod()
# Tags: querying
##########################################################################
def tagsList(self, where="", priority=", cards.priority", kwargs={}):
"Return a list of (cardId, allTags, priority)"
return self.s.all("""
select cards.id, facts.tags || " " || models.tags || " " ||
cardModels.name %s from cards, facts, models, cardModels where
cards.factId == facts.id and facts.modelId == models.id
and cards.cardModelId = cardModels.id %s""" % (priority, where),
**kwargs)
def splitTagsList(self, where=""):
return self.s.all("""
select cards.id, facts.tags, models.tags, cardModels.name
from cards, facts, models, cardModels where
cards.factId == facts.id and facts.modelId == models.id
and cards.cardModelId = cardModels.id
%s""" % where)
def cardsWithNoTags(self):
return self.s.column0("""
select cards.id from cards, facts where
facts.tags = ""
and cards.factId = facts.id""")
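    # "and" searches intersect the per-tag card id sets; "or" searches
    # take their union via a single IN clause. A * in a tag acts as a
    # wildcard (translated to SQL LIKE's %).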
def cardsWithTags(self, tagStr, search="and"):
tagIds = []
# get ids
for tag in tagStr.split(" "):
tag = tag.replace("*", "%")
if "%" in tag:
ids = self.s.column0(
"select id from tags where tag like :tag", tag=tag)
if search == "and" and not ids:
return []
tagIds.append(ids)
else:
id = self.s.scalar(
"select id from tags where tag = :tag", tag=tag)
if search == "and" and not id:
return []
tagIds.append(id)
# search for any
if search == "or":
return self.s.column0(
"select cardId from cardTags where tagId in %s" %
ids2str(tagIds))
else:
# search for all
l = []
for ids in tagIds:
if isinstance(ids, types.ListType):
l.append("select cardId from cardTags where tagId in %s" %
ids2str(ids))
else:
l.append("select cardId from cardTags where tagId = %d" %
ids)
q = " intersect ".join(l)
return self.s.column0(q)
def allTags(self):
return self.s.column0("select tag from tags order by tag")
def allTags_(self, where=""):
t = self.s.column0("select tags from facts %s" % where)
t += self.s.column0("select tags from models")
t += self.s.column0("select name from cardModels")
return sorted(list(set(parseTags(joinTags(t)))))
def allUserTags(self):
return sorted(list(set(parseTags(joinTags(self.s.column0(
"select tags from facts"))))))
def factTags(self, ids):
return self.s.all("""
select id, tags from facts
where id in %s""" % ids2str(ids))
# Tags: caching
##########################################################################
def updateFactTags(self, factIds):
self.updateCardTags(self.s.column0(
"select id from cards where factId in %s" %
ids2str(factIds)))
def updateModelTags(self, modelId):
self.updateCardTags(self.s.column0("""
select cards.id from cards, facts where
cards.factId = facts.id and
facts.modelId = :id""", id=modelId))
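    # cardTags.src records where a tag came from: 0 = the fact's own
    # tags, 1 = the model's tags, 2 = the card template (card model)
    # name.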
def updateCardTags(self, cardIds=None):
self.s.flush()
if cardIds is None:
self.s.statement("delete from cardTags")
self.s.statement("delete from tags")
tids = tagIds(self.s, self.allTags_())
rows = self.splitTagsList()
else:
self.s.statement("delete from cardTags where cardId in %s" %
ids2str(cardIds))
fids = ids2str(self.s.column0(
"select factId from cards where id in %s" %
ids2str(cardIds)))
tids = tagIds(self.s, self.allTags_(
where="where id in %s" % fids))
rows = self.splitTagsList(
where="and facts.id in %s" % fids)
d = []
for (id, fact, model, templ) in rows:
for tag in parseTags(fact):
d.append({"cardId": id,
"tagId": tids[tag.lower()],
"src": 0})
for tag in parseTags(model):
d.append({"cardId": id,
"tagId": tids[tag.lower()],
"src": 1})
for tag in parseTags(templ):
d.append({"cardId": id,
"tagId": tids[tag.lower()],
"src": 2})
if d:
self.s.statements("""
insert into cardTags
(cardId, tagId, src) values
(:cardId, :tagId, :src)""", d)
self.s.execute(
"delete from tags where priority = 2 and id not in "+
"(select distinct tagId from cardTags)")
def updateTagsForModel(self, model):
cards = self.s.all("""
select cards.id, cards.cardModelId from cards, facts where
facts.modelId = :m and cards.factId = facts.id""", m=model.id)
cardIds = [x[0] for x in cards]
factIds = self.s.column0("""
select facts.id from facts where
facts.modelId = :m""", m=model.id)
cmtags = " ".join([cm.name for cm in model.cardModels])
tids = tagIds(self.s, parseTags(model.tags) +
parseTags(cmtags))
self.s.statement("""
delete from cardTags where cardId in %s
and src in (1, 2)""" % ids2str(cardIds))
d = []
for tag in parseTags(model.tags):
for id in cardIds:
d.append({"cardId": id,
"tagId": tids[tag.lower()],
"src": 1})
cmtags = {}
for cm in model.cardModels:
cmtags[cm.id] = parseTags(cm.name)
for c in cards:
for tag in cmtags[c[1]]:
d.append({"cardId": c[0],
"tagId": tids[tag.lower()],
"src": 2})
if d:
self.s.statements("""
insert into cardTags
(cardId, tagId, src) values
(:cardId, :tagId, :src)""", d)
self.s.statement("""
delete from tags where id not in (select distinct tagId from cardTags)
and priority = 2
""")
# Tags: adding/removing in bulk
##########################################################################
# these could be optimized to use the tag cache in the future
def addTags(self, ids, tags):
"Add tags in bulk. Caller must .reset()"
self.startProgress()
tlist = self.factTags(ids)
newTags = parseTags(tags)
now = time.time()
pending = []
for (id, tags) in tlist:
oldTags = parseTags(tags)
tmpTags = list(set(oldTags + newTags))
if tmpTags != oldTags:
pending.append(
{'id': id, 'now': now, 'tags': " ".join(tmpTags)})
self.s.statements("""
update facts set
tags = :tags,
modified = :now
where id = :id""", pending)
factIds = [c['id'] for c in pending]
cardIds = self.s.column0(
"select id from cards where factId in %s" %
ids2str(factIds))
self.updateCardQACacheFromIds(factIds, type="facts")
self.updateCardTags(cardIds)
self.updatePriorities(cardIds)
self.flushMod()
self.finishProgress()
self.refreshSession()
def deleteTags(self, ids, tags):
"Delete tags in bulk. Caller must .reset()"
self.startProgress()
tlist = self.factTags(ids)
newTags = parseTags(tags)
now = time.time()
pending = []
for (id, tags) in tlist:
oldTags = parseTags(tags)
tmpTags = oldTags[:]
for tag in newTags:
try:
tmpTags.remove(tag)
except ValueError:
pass
if tmpTags != oldTags:
pending.append(
{'id': id, 'now': now, 'tags': " ".join(tmpTags)})
self.s.statements("""
update facts set
tags = :tags,
modified = :now
where id = :id""", pending)
factIds = [c['id'] for c in pending]
cardIds = self.s.column0(
"select id from cards where factId in %s" %
ids2str(factIds))
self.updateCardQACacheFromIds(factIds, type="facts")
self.updateCardTags(cardIds)
self.updatePriorities(cardIds)
self.flushMod()
self.finishProgress()
self.refreshSession()
# Find
##########################################################################
def allFMFields(self, tolower=False):
fields = []
try:
fields = self.s.column0(
"select distinct name from fieldmodels order by name")
except:
fields = []
if tolower is True:
for i, v in enumerate(fields):
fields[i] = v.lower()
return fields
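    # The query grammar handled below (examples, not exhaustive):
    #   dog              plain word, matched against the fact's field cache
    #   "a phrase"       quoted phrase; 'a phrase' additionally applies a
    #                    word-boundary filter
    #   -dog             negation (also works with the prefixes below)
    #   tag:verb         tag search          is:due|new|rev|failed|...
    #   fid:123          fact id search      card:Forward
    #   Front:dog        search within the field named "Front"
    #   question:dog     search the rendered question (likewise answer:)
    #   field:Front      the field exists and is non-empty
    #   show:one         collapse results to one card per fact
    # Each token is parsed into a (value, is_neg, type, filter) tuple.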
def _parseQuery(self, query):
tokens = []
res = []
allowedfields = self.allFMFields(True)
def addSearchFieldToken(field, value, isNeg, filter):
if field.lower() in allowedfields:
res.append((field + ':' + value, isNeg, SEARCH_FIELD, filter))
elif field in ['question', 'answer']:
res.append((field + ':' + value, isNeg, SEARCH_QA, filter))
else:
for p in phraselog:
res.append((p['value'], p['is_neg'], p['type'], p['filter']))
# break query into words or phraselog
# an extra space is added so the loop never ends in the middle
# completing a token
for match in re.findall(
r'(-)?\'(([^\'\\]|\\.)*)\'|(-)?"(([^"\\]|\\.)*)"|(-)?([^ ]+)|([ ]+)',
query + ' '):
type = ' '
if match[1]: type = "'"
elif match[4]: type = '"'
value = (match[1] or match[4] or match[7])
isNeg = (match[0] == '-' or match[3] == '-' or match[6] == '-')
tokens.append({'type': type, 'value': value, 'is_neg': isNeg,
'filter': ('wb' if type == "'" else 'none')})
intoken = isNeg = False
        field = ''     # name of the field for field-related commands
        phraselog = [] # log of phrases in case a potential command is not a command
for c, token in enumerate(tokens):
doprocess = True # only look for commands when this is true
            # prevent cases such as "field" : value from being processed as a command
if len(token['value']) == 0:
if intoken is True and type == SEARCH_FIELD and field:
                # case: "fieldname: anything" checks for the existence of the fieldname
addSearchFieldToken(field, '*', isNeg, 'none')
phraselog = [] # reset phrases since command is completed
intoken = doprocess = False
if intoken is True:
if type == SEARCH_FIELD_EXISTS:
#case: field:"value"
res.append((token['value'], isNeg, type, 'none'))
intoken = doprocess = False
elif type == SEARCH_FIELD and field:
#case: fieldname:"value"
addSearchFieldToken(
field, token['value'], isNeg, token['filter'])
intoken = doprocess = False
elif type == SEARCH_FIELD and not field:
#case: "fieldname":"name" or "field" anything
if token['value'].startswith(":") and len(phraselog) == 1:
#we now know a colon is next, so mark it as field
# and keep looking for the value
field = phraselog[0]['value']
parts = token['value'].split(':', 1)
phraselog.append(
{'value': token['value'], 'is_neg': False,
'type': SEARCH_PHRASE, 'filter': token['filter']})
if parts[1]:
#value is included with the :, so wrap it up
addSearchFieldToken(field, parts[1], isNeg, 'none')
intoken = doprocess = False
doprocess = False
else:
#case: "fieldname"string/"fieldname"tag:name
intoken = False
if intoken is False and doprocess is False:
#command has been fully processed
phraselog = [] # reset phraselog, since we used it for a command
if intoken is False:
#include any non-command related phrases in the query
for p in phraselog: res.append(
(p['value'], p['is_neg'], p['type'], p['filter']))
phraselog = []
if intoken is False and doprocess is True:
field = ''
isNeg = token['is_neg']
if token['value'].startswith("tag:"):
token['value'] = token['value'][4:]
type = SEARCH_TAG
elif token['value'].startswith("is:"):
token['value'] = token['value'][3:].lower()
type = SEARCH_TYPE
elif token['value'].startswith("fid:") and len(token['value']) > 4:
dec = token['value'][4:]
try:
int(dec)
token['value'] = token['value'][4:]
except:
try:
for d in dec.split(","):
int(d)
token['value'] = token['value'][4:]
except:
token['value'] = "0"
type = SEARCH_FID
elif token['value'].startswith("card:"):
token['value'] = token['value'][5:]
type = SEARCH_CARD
elif token['value'].startswith("show:"):
token['value'] = token['value'][5:].lower()
type = SEARCH_DISTINCT
elif token['value'].startswith("field:"):
type = SEARCH_FIELD_EXISTS
parts = token['value'][6:].split(':', 1)
field = parts[0]
if len(parts) == 1 and parts[0]:
token['value'] = parts[0]
elif len(parts) == 1 and not parts[0]:
intoken = True
else:
type = SEARCH_FIELD
intoken = True
parts = token['value'].split(':', 1)
phraselog.append(
{'value': token['value'], 'is_neg': isNeg,
'type': SEARCH_PHRASE, 'filter': token['filter']})
if len(parts) == 2 and parts[0]:
field = parts[0]
if parts[1]:
#simple fieldname:value case - no need to look for more data
addSearchFieldToken(field, parts[1], isNeg, 'none')
intoken = doprocess = False
if intoken is False: phraselog = []
if intoken is False and doprocess is True:
res.append((token['value'], isNeg, type, token['filter']))
return res
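    # findCards() below stitches the sub-queries produced by
    # _findCards() together with WHERE/AND, applies any post-filters
    # via findCardsMatchingFilters(), and groups by factId when
    # show:one was requested.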
def findCards(self, query):
(q, cmquery, showdistinct, filters, args) = self.findCardsWhere(query)
(factIdList, cardIdList) = self.findCardsMatchingFilters(filters)
query = "select id from cards"
hasWhere = False
if q:
query += " where " + q
hasWhere = True
if cmquery['pos'] or cmquery['neg']:
if hasWhere is False:
query += " where "
hasWhere = True
else: query += " and "
if cmquery['pos']:
query += (" factId in(select distinct factId from cards "+
"where id in (" + cmquery['pos'] + ")) ")
query += " and id in(" + cmquery['pos'] + ") "
if cmquery['neg']:
query += (" factId not in(select distinct factId from "+
"cards where id in (" + cmquery['neg'] + ")) ")
if factIdList is not None:
if hasWhere is False:
query += " where "
hasWhere = True
else: query += " and "
query += " factId IN %s" % ids2str(factIdList)
if cardIdList is not None:
if hasWhere is False:
query += " where "
hasWhere = True
else: query += " and "
query += " id IN %s" % ids2str(cardIdList)
if showdistinct:
query += " group by factId"
#print query, args
return self.s.column0(query, **args)
def findCardsWhere(self, query):
(tquery, fquery, qquery, fidquery, cmquery, sfquery, qaquery,
showdistinct, filters, args) = self._findCards(query)
q = ""
x = []
if tquery:
x.append(" id in (%s)" % tquery)
if fquery:
x.append(" factId in (%s)" % fquery)
if qquery:
x.append(" id in (%s)" % qquery)
if fidquery:
x.append(" id in (%s)" % fidquery)
if sfquery:
x.append(" factId in (%s)" % sfquery)
if qaquery:
x.append(" id in (%s)" % qaquery)
if x:
q += " and ".join(x)
return q, cmquery, showdistinct, filters, args
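    # Filters run in two passes: a cheap SQL LIKE query narrows the
    # candidate rows, then a compiled word-boundary regexp decides the
    # actual matches in Python.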
def findCardsMatchingFilters(self, filters):
factFilters = []
fieldFilters = {}
cardFilters = {}
factFilterMatches = []
fieldFilterMatches = []
cardFilterMatches = []
if (len(filters) > 0):
for filter in filters:
if filter['scope'] == 'fact':
regexp = re.compile(
r'\b' + re.escape(filter['value']) + r'\b', flags=re.I)
factFilters.append(
{'value': filter['value'], 'regexp': regexp,
'is_neg': filter['is_neg']})
if filter['scope'] == 'field':
fieldName = filter['field'].lower()
if (fieldName in fieldFilters) is False:
fieldFilters[fieldName] = []
regexp = re.compile(
r'\b' + re.escape(filter['value']) + r'\b', flags=re.I)
fieldFilters[fieldName].append(
{'value': filter['value'], 'regexp': regexp,
'is_neg': filter['is_neg']})
if filter['scope'] == 'card':
fieldName = filter['field'].lower()
if (fieldName in cardFilters) is False:
cardFilters[fieldName] = []
regexp = re.compile(r'\b' + re.escape(filter['value']) +
r'\b', flags=re.I)
cardFilters[fieldName].append(
{'value': filter['value'], 'regexp': regexp,
'is_neg': filter['is_neg']})
if len(factFilters) > 0:
fquery = ''
args = {}
for filter in factFilters:
c = len(args)
if fquery:
if filter['is_neg']: fquery += " except "
else: fquery += " intersect "
elif filter['is_neg']: fquery += "select id from fields except "
value = filter['value'].replace("*", "%")
args["_ff_%d" % c] = "%"+value+"%"
fquery += (
"select id from fields where value like "+
":_ff_%d escape '\\'" % c)
rows = self.s.execute(
'select factId, value from fields where id in (' +
fquery + ')', args)
while (1):
row = rows.fetchone()
if row is None: break
doesMatch = False
for filter in factFilters:
res = filter['regexp'].search(row[1])
if ((filter['is_neg'] is False and res) or
(filter['is_neg'] is True and res is None)):
factFilterMatches.append(row[0])
if len(fieldFilters) > 0:
sfquery = ''
args = {}
for field, filters in fieldFilters.iteritems():
for filter in filters:
c = len(args)
if sfquery:
if filter['is_neg']: sfquery += " except "
else: sfquery += " intersect "
elif filter['is_neg']: sfquery += "select id from fields except "
field = field.replace("*", "%")
value = filter['value'].replace("*", "%")
args["_ff_%d" % c] = "%"+value+"%"
ids = self.s.column0(
"select id from fieldmodels where name like "+
":field escape '\\'", field=field)
sfquery += ("select id from fields where "+
"fieldModelId in %s and value like "+
":_ff_%d escape '\\'") % (ids2str(ids), c)
rows = self.s.execute(
'select f.factId, f.value, fm.name from fields as f '+
'left join fieldmodels as fm ON (f.fieldModelId = '+
'fm.id) where f.id in (' + sfquery + ')', args)
while (1):
row = rows.fetchone()
if row is None: break
field = row[2].lower()
doesMatch = False
if field in fieldFilters:
for filter in fieldFilters[field]:
res = filter['regexp'].search(row[1])
if ((filter['is_neg'] is False and res) or
(filter['is_neg'] is True and res is None)):
fieldFilterMatches.append(row[0])
if len(cardFilters) > 0:
qaquery = ''
args = {}
for field, filters in cardFilters.iteritems():
for filter in filters:
c = len(args)
if qaquery:
if filter['is_neg']: qaquery += " except "
else: qaquery += " intersect "
elif filter['is_neg']: qaquery += "select id from cards except "
                    value = filter['value'].replace("*", "%")
args["_ff_%d" % c] = "%"+value+"%"
if field == 'question':
qaquery += "select id from cards where question "
qaquery += "like :_ff_%d escape '\\'" % c
else:
qaquery += "select id from cards where answer "
qaquery += "like :_ff_%d escape '\\'" % c
rows = self.s.execute(
'select id, question, answer from cards where id IN (' +
qaquery + ')', args)
while (1):
row = rows.fetchone()
if row is None: break
doesMatch = False
if field in cardFilters:
rowValue = row[1] if field == 'question' else row[2]
for filter in cardFilters[field]:
res = filter['regexp'].search(rowValue)
if ((filter['is_neg'] is False and res) or
(filter['is_neg'] is True and res is None)):
cardFilterMatches.append(row[0])
factIds = None
if len(factFilters) > 0 or len(fieldFilters) > 0:
factIds = []
factIds.extend(factFilterMatches)
factIds.extend(fieldFilterMatches)
cardIds = None
if len(cardFilters) > 0:
cardIds = []
cardIds.extend(cardFilterMatches)
return (factIds, cardIds)
def _findCards(self, query):
"Find facts matching QUERY."
tquery = ""
fquery = ""
qquery = ""
fidquery = ""
cmquery = { 'pos': '', 'neg': '' }
sfquery = qaquery = ""
showdistinct = False
filters = []
args = {}
for c, (token, isNeg, type, filter) in enumerate(self._parseQuery(query)):
if type == SEARCH_TAG:
# a tag
if tquery:
if isNeg:
tquery += " except "
else:
tquery += " intersect "
elif isNeg:
tquery += "select id from cards except "
if token == "none":
tquery += """
select cards.id from cards, facts where facts.tags = '' and cards.factId = facts.id """
else:
token = token.replace("*", "%")
ids = self.s.column0("""
select id from tags where tag like :tag escape '\\'""", tag=token)
tquery += """
select cardId from cardTags where cardTags.tagId in %s""" % ids2str(ids)
elif type == SEARCH_TYPE:
if qquery:
if isNeg:
qquery += " except "
else:
qquery += " intersect "
elif isNeg:
qquery += "select id from cards except "
if token in ("rev", "new", "failed"):
if token == "rev":
n = 1
elif token == "new":
n = 2
else:
n = 0
qquery += "select id from cards where type = %d" % n
elif token == "delayed":
qquery += ("select id from cards where "
"due < %d and combinedDue > %d and "
"type in (0,1,2)") % (
self.dueCutoff, self.dueCutoff)
elif token == "suspended":
qquery += ("select id from cards where "
"priority = -3")
elif token == "leech":
qquery += (
"select id from cards where noCount >= (select value "
"from deckvars where key = 'leechFails')")
else: # due
qquery += ("select id from cards where "
"type in (0,1) and combinedDue < %d") % self.dueCutoff
elif type == SEARCH_FID:
if fidquery:
if isNeg:
fidquery += " except "
else:
fidquery += " intersect "
elif isNeg:
fidquery += "select id from cards except "
fidquery += "select id from cards where factId in (%s)" % token
elif type == SEARCH_CARD:
token = token.replace("*", "%")
ids = self.s.column0("""
select id from tags where tag like :tag escape '\\'""", tag=token)
if isNeg:
if cmquery['neg']:
cmquery['neg'] += " intersect "
cmquery['neg'] += """
select cardId from cardTags where src = 2 and cardTags.tagId in %s""" % ids2str(ids)
else:
if cmquery['pos']:
cmquery['pos'] += " intersect "
cmquery['pos'] += """
select cardId from cardTags where src = 2 and cardTags.tagId in %s""" % ids2str(ids)
elif type == SEARCH_FIELD or type == SEARCH_FIELD_EXISTS:
field = value = ''
if type == SEARCH_FIELD:
parts = token.split(':', 1);
if len(parts) == 2:
field = parts[0]
value = parts[1]
elif type == SEARCH_FIELD_EXISTS:
field = token
value = '*'
if (type == SEARCH_FIELD and filter != 'none'):
if field and value:
filters.append(
{'scope': 'field', 'type': filter,
'field': field, 'value': value, 'is_neg': isNeg})
else:
if field and value:
if sfquery:
if isNeg:
sfquery += " except "
else:
sfquery += " intersect "
elif isNeg:
sfquery += "select id from facts except "
field = field.replace("*", "%")
value = value.replace("*", "%")
args["_ff_%d" % c] = "%"+value+"%"
ids = self.s.column0("""
select id from fieldmodels where name like :field escape '\\'""", field=field)
sfquery += """
select factId from fields where fieldModelId in %s and
value like :_ff_%d escape '\\'""" % (ids2str(ids), c)
elif type == SEARCH_QA:
field = value = ''
parts = token.split(':', 1);
if len(parts) == 2:
field = parts[0]
value = parts[1]
if (filter != 'none'):
if field and value:
filters.append(
{'scope': 'card', 'type': filter, 'field': field,
'value': value, 'is_neg': isNeg})
else:
if field and value:
if qaquery:
if isNeg:
qaquery += " except "
else:
qaquery += " intersect "
elif isNeg:
qaquery += "select id from cards except "
value = value.replace("*", "%")
args["_ff_%d" % c] = "%"+value+"%"
if field == 'question':
qaquery += """
select id from cards where question like :_ff_%d escape '\\'""" % c
else:
qaquery += """
select id from cards where answer like :_ff_%d escape '\\'""" % c
elif type == SEARCH_DISTINCT:
                if isNeg is False:
                    showdistinct = (token == "one")
                else:
                    showdistinct = (token != "one")
else:
if (filter != 'none'):
filters.append(
{'scope': 'fact', 'type': filter,
'value': token, 'is_neg': isNeg})
else:
if fquery:
if isNeg:
fquery += " except "
else:
fquery += " intersect "
elif isNeg:
fquery += "select id from facts except "
token = token.replace("*", "%")
args["_ff_%d" % c] = "%"+token+"%"
fquery += """
select id from facts where spaceUntil like :_ff_%d escape '\\'""" % c
return (tquery, fquery, qquery, fidquery, cmquery, sfquery,
qaquery, showdistinct, filters, args)
# Find and replace
##########################################################################
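    # A hypothetical call, replacing a literal string in all fields of
    # the given facts (pass isRe=True to treat src as a regular
    # expression, or field=<fieldModelId> to restrict the search):
    #   deck.findReplace(factIds, "colour", "color")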
def findReplace(self, factIds, src, dst, isRe=False, field=None):
"Find and replace fields in a fact."
# find
s = "select id, factId, value from fields where factId in %s"
if isRe:
isRe = re.compile(src)
else:
s += " and value like :v"
if field:
s += " and fieldModelId = :fmid"
rows = self.s.all(s % ids2str(factIds),
v="%"+src.replace("%", "%%")+"%",
fmid=field)
modded = []
if isRe:
modded = [
{'id': id, 'fid': fid, 'val': re.sub(isRe, dst, val)}
for (id, fid, val) in rows
if isRe.search(val)]
else:
modded = [
{'id': id, 'fid': fid, 'val': val.replace(src, dst)}
for (id, fid, val) in rows
if val.find(src) != -1]
# update
self.s.statements(
'update fields set value = :val where id = :id', modded)
self.updateCardQACacheFromIds([f['fid'] for f in modded],
type="facts")
return len(set([f['fid'] for f in modded]))
# Find duplicates
##########################################################################
def findDuplicates(self, fmids):
data = self.s.all(
"select factId, value from fields where fieldModelId in %s" %
ids2str(fmids))
vals = {}
for (fid, val) in data:
if not val.strip():
continue
if val not in vals:
vals[val] = [fid]
else:
vals[val].append(fid)
return [(k,v) for (k,v) in vals.items() if len(v) > 1]
# Progress info
##########################################################################
def startProgress(self, max=0, min=0, title=None):
self.enableProgressHandler()
runHook("startProgress", max, min, title)
self.s.flush()
def updateProgress(self, label=None, value=None):
runHook("updateProgress", label, value)
def finishProgress(self):
runHook("updateProgress")
runHook("finishProgress")
self.disableProgressHandler()
def progressHandler(self):
if (time.time() - self.progressHandlerCalled) < 0.2:
return
self.progressHandlerCalled = time.time()
if self.progressHandlerEnabled:
runHook("dbProgress")
def enableProgressHandler(self):
self.progressHandlerEnabled = True
def disableProgressHandler(self):
self.progressHandlerEnabled = False
# Notifications
##########################################################################
def notify(self, msg):
"Send a notice to all listeners, or display on stdout."
if hookEmpty("notify"):
pass
else:
runHook("notify", msg)
# File-related
##########################################################################
def name(self):
if not self.path:
return u"untitled"
n = os.path.splitext(os.path.basename(self.path))[0]
assert '/' not in n
assert '\\' not in n
return n
# Session handling
##########################################################################
def startSession(self):
self.lastSessionStart = self.sessionStartTime
self.sessionStartTime = time.time()
self.sessionStartReps = self.getStats()['dTotal']
def stopSession(self):
self.sessionStartTime = 0
def sessionLimitReached(self):
if not self.sessionStartTime:
# not started
return False
if (self.sessionTimeLimit and time.time() >
(self.sessionStartTime + self.sessionTimeLimit)):
return True
if (self.sessionRepLimit and self.sessionRepLimit <=
self.getStats()['dTotal'] - self.sessionStartReps):
return True
return False
# Meta vars
##########################################################################
def getInt(self, key, type=int):
ret = self.s.scalar("select value from deckVars where key = :k",
k=key)
if ret is not None:
ret = type(ret)
return ret
def getFloat(self, key):
return self.getInt(key, float)
def getBool(self, key):
ret = self.s.scalar("select value from deckVars where key = :k",
k=key)
if ret is not None:
# hack to work around ankidroid bug
if ret.lower() == "true":
return True
elif ret.lower() == "false":
return False
else:
ret = not not int(ret)
return ret
def getVar(self, key):
"Return value for key as string, or None."
return self.s.scalar("select value from deckVars where key = :k",
k=key)
def setVar(self, key, value, mod=True):
if self.s.scalar("""
select value = :value from deckVars
where key = :key""", key=key, value=value):
return
# can't use insert or replace as it confuses the undo code
if self.s.scalar("select 1 from deckVars where key = :key", key=key):
self.s.statement("update deckVars set value=:value where key = :key",
key=key, value=value)
else:
self.s.statement("insert into deckVars (key, value) "
"values (:key, :value)", key=key, value=value)
if mod:
self.setModified()
def setVarDefault(self, key, value):
if not self.s.scalar(
"select 1 from deckVars where key = :key", key=key):
self.s.statement("insert into deckVars (key, value) "
"values (:key, :value)", key=key, value=value)
# Failed card handling
##########################################################################
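    # The policy indices map to delays as follows (see the two
    # functions below): 0 = 10 minutes with collapseTime=1 and at most
    # 20 failed cards; 1 = immediately; 2 = 10 minutes; 3 = 8 hours;
    # 4 = 3 days; 5 = custom settings.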
def setFailedCardPolicy(self, idx):
if idx == 5:
# custom
return
self.collapseTime = 0
self.failedCardMax = 0
if idx == 0:
d = 600
self.collapseTime = 1
self.failedCardMax = 20
elif idx == 1:
d = 0
elif idx == 2:
d = 600
elif idx == 3:
d = 28800
elif idx == 4:
d = 259200
self.delay0 = d
self.delay1 = 0
def getFailedCardPolicy(self):
if self.delay1:
return 5
d = self.delay0
if self.collapseTime == 1:
if d == 600 and self.failedCardMax == 20:
return 0
return 5
if d == 0 and self.failedCardMax == 0:
return 1
elif d == 600:
return 2
elif d == 28800:
return 3
elif d == 259200:
return 4
return 5
# Media
##########################################################################
def mediaDir(self, create=False):
"Return the media directory if exists. None if couldn't create."
if self.path:
if self.mediaPrefix:
dir = os.path.join(
self.mediaPrefix, os.path.basename(self.path))
else:
dir = self.path
dir = re.sub("(?i)\.(oldanki)$", ".media", dir)
if create == None:
# don't create, but return dir
return dir
if not os.path.exists(dir) and create:
try:
os.makedirs(dir)
except OSError:
# permission denied
return None
else:
# memory-backed; need temp store
if not self.tmpMediaDir and create:
self.tmpMediaDir = tempfile.mkdtemp(prefix="oldanki")
dir = self.tmpMediaDir
if not dir or not os.path.exists(dir):
return None
# change to the current dir
os.chdir(dir)
return dir
def addMedia(self, path):
"""Add PATH to the media directory.
Return new path, relative to media dir."""
return oldanki.media.copyToMedia(self, path)
def renameMediaDir(self, oldPath):
"Copy oldPath to our current media dir. "
assert os.path.exists(oldPath)
newPath = self.mediaDir(create=None)
# copytree doesn't want the dir to exist
try:
shutil.copytree(oldPath, newPath)
except:
# FIXME: should really remove everything in old dir instead of
# giving up
pass
# DB helpers
##########################################################################
def save(self):
"Commit any pending changes to disk."
if self.lastLoaded == self.modified:
return
self.lastLoaded = self.modified
self.s.commit()
def close(self):
if self.s:
self.s.rollback()
self.s.clear()
self.s.close()
self.engine.dispose()
runHook("deckClosed")
def rollback(self):
"Roll back the current transaction and reset session state."
self.s.rollback()
self.s.clear()
self.s.update(self)
self.s.refresh(self)
def refreshSession(self):
"Flush and expire all items from the session."
self.s.flush()
self.s.expire_all()
def openSession(self):
"Open a new session. Assumes old session is already closed."
self.s = SessionHelper(self.Session(), lock=self.needLock)
self.s.update(self)
self.refreshSession()
def closeSession(self):
"Close the current session, saving any changes. Do nothing if no session."
if self.s:
self.save()
try:
self.s.expunge(self)
except:
import sys
sys.stderr.write("ERROR expunging deck..\n")
self.s.close()
self.s = None
def setModified(self, newTime=None):
#import traceback; traceback.print_stack()
self.modified = newTime or time.time()
def flushMod(self):
"Mark modified and flush to DB."
self.setModified()
self.s.flush()
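    # saveAs() copies the deck row by row: for each table it has sqlite
    # render every row as a literal INSERT statement (via quote()) and
    # replays those statements on the new database, avoiding an
    # implicit commit on the current connection.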
def saveAs(self, newPath):
"Returns new deck. Old connection is closed without saving."
oldMediaDir = self.mediaDir()
self.s.flush()
# remove new deck if it exists
try:
os.unlink(newPath)
except OSError:
pass
self.startProgress()
# copy tables, avoiding implicit commit on current db
DeckStorage.Deck(newPath, backup=False).close()
new = sqlite.connect(newPath)
for table in self.s.column0(
"select name from sqlite_master where type = 'table'"):
if table.startswith("sqlite_"):
continue
new.execute("delete from %s" % table)
cols = [str(x[1]) for x in new.execute(
"pragma table_info('%s')" % table).fetchall()]
q = "select 'insert into %(table)s values("
q += ",".join(["'||quote(\"" + col + "\")||'" for col in cols])
q += ")' from %(table)s"
q = q % {'table': table}
c = 0
for row in self.s.execute(q):
new.execute(row[0])
                if c % 1000 == 0:
self.updateProgress()
c += 1
# save new, close both
new.commit()
new.close()
self.close()
# open again in orm
newDeck = DeckStorage.Deck(newPath, backup=False)
# move media
if oldMediaDir:
newDeck.renameMediaDir(oldMediaDir)
# forget sync name
newDeck.syncName = None
newDeck.s.commit()
# and return the new deck
self.finishProgress()
return newDeck
# Syncing
##########################################################################
# toggling does not bump deck mod time, since it may happen on upgrade,
# and the variable is not synced
def enableSyncing(self):
self.syncName = unicode(checksum(self.path.encode("utf-8")))
self.s.commit()
def disableSyncing(self):
self.syncName = None
self.s.commit()
def syncingEnabled(self):
return self.syncName
def checkSyncHash(self):
if self.syncName and self.syncName != checksum(self.path.encode("utf-8")):
self.notify(_("""\
Because '%s' has been moved or copied, automatic synchronisation \
has been disabled (ERR-0100).
You can disable this check in Settings>Preferences>Network.""") % self.name())
self.disableSyncing()
self.syncName = None
# DB maintenance
##########################################################################
def recoverCards(self, ids):
"Put cards with damaged facts into new facts."
# create a new model in case the user has modified a previous one
from oldanki.stdmodels import RecoveryModel
m = RecoveryModel()
last = self.currentModel
self.addModel(m)
def repl(s):
# strip field model text
return re.sub("<span class=\"fm.*?>(.*?)</span>", "\\1", s)
# add new facts, pointing old card at new fact
for (id, q, a) in self.s.all("""
select id, question, answer from cards
where id in %s""" % ids2str(ids)):
f = self.newFact()
f['Question'] = repl(q)
f['Answer'] = repl(a)
try:
f.tags = self.s.scalar("""
select group_concat(tag, " ") from tags t, cardTags ct
where cardId = :cid and ct.tagId = t.id""", cid=id) or u""
except:
raise Exception("Your sqlite is too old.")
cards = self.addFact(f)
# delete the freshly created card and point old card to this fact
self.s.statement("delete from cards where id = :id",
id=f.cards[0].id)
self.s.statement("""
update cards set factId = :fid, cardModelId = :cmid, ordinal = 0
where id = :id""", fid=f.id, cmid=m.cardModels[0].id, id=id)
# restore old model
self.currentModel = last
def fixIntegrity(self, quick=False):
"Fix some problems and rebuild caches. Caller must .reset()"
self.s.commit()
self.resetUndo()
problems = []
recover = False
if quick:
num = 4
else:
num = 9
self.startProgress(num)
self.updateProgress(_("Checking integrity..."))
if self.s.scalar("pragma integrity_check") != "ok":
self.finishProgress()
return _("Database file is damaged.\n"
"Please restore from automatic backup (see FAQ).")
# ensure correct views and indexes are available
self.updateProgress()
DeckStorage._addViews(self)
DeckStorage._addIndices(self)
# does the user have a model?
self.updateProgress(_("Checking schema..."))
if not self.s.scalar("select count(id) from models"):
self.addModel(BasicModel())
problems.append(_("Deck was missing a model"))
# is currentModel pointing to a valid model?
if not self.s.all("""
select decks.id from decks, models where
decks.currentModelId = models.id"""):
self.currentModelId = self.models[0].id
problems.append(_("The current model didn't exist"))
# fields missing a field model
ids = self.s.column0("""
select id from fields where fieldModelId not in (
select distinct id from fieldModels)""")
if ids:
self.s.statement("delete from fields where id in %s" %
ids2str(ids))
problems.append(ngettext("Deleted %d field with missing field model",
"Deleted %d fields with missing field model", len(ids)) %
len(ids))
# facts missing a field?
ids = self.s.column0("""
select distinct facts.id from facts, fieldModels where
facts.modelId = fieldModels.modelId and fieldModels.id not in
(select fieldModelId from fields where factId = facts.id)""")
if ids:
self.deleteFacts(ids)
problems.append(ngettext("Deleted %d fact with missing fields",
"Deleted %d facts with missing fields", len(ids)) %
len(ids))
# cards missing a fact?
ids = self.s.column0("""
select id from cards where factId not in (select id from facts)""")
if ids:
recover = True
self.recoverCards(ids)
problems.append(ngettext("Recovered %d card with missing fact",
"Recovered %d cards with missing fact", len(ids)) %
len(ids))
# cards missing a card model?
ids = self.s.column0("""
select id from cards where cardModelId not in
(select id from cardModels)""")
if ids:
recover = True
self.recoverCards(ids)
problems.append(ngettext("Recovered %d card with no card template",
"Recovered %d cards with no card template", len(ids)) %
len(ids))
# cards with a card model from the wrong model
ids = self.s.column0("""
select id from cards where cardModelId not in (select cm.id from
cardModels cm, facts f where cm.modelId = f.modelId and
f.id = cards.factId)""")
if ids:
recover = True
self.recoverCards(ids)
problems.append(ngettext("Recovered %d card with wrong card template",
"Recovered %d cards with wrong card template", len(ids)) %
len(ids))
# facts missing a card?
ids = self.deleteDanglingFacts()
if ids:
problems.append(ngettext("Deleted %d fact with no cards",
"Deleted %d facts with no cards", len(ids)) %
len(ids))
# dangling fields?
ids = self.s.column0("""
select id from fields where factId not in (select id from facts)""")
if ids:
self.s.statement(
"delete from fields where id in %s" % ids2str(ids))
problems.append(ngettext("Deleted %d dangling field",
"Deleted %d dangling fields", len(ids)) %
len(ids))
self.s.flush()
if not quick:
self.updateProgress()
# these sometimes end up null on upgrade
self.s.statement("update models set source = 0 where source is null")
self.s.statement(
"update cardModels set allowEmptyAnswer = 1, typeAnswer = '' "
"where allowEmptyAnswer is null or typeAnswer is null")
# fix tags
self.updateProgress(_("Rebuilding tag cache..."))
self.updateCardTags()
# fix any priorities
self.updateProgress(_("Updating priorities..."))
self.updateAllPriorities(dirty=False)
# make sure
self.updateProgress(_("Updating ordinals..."))
self.s.statement("""
update fields set ordinal = (select ordinal from fieldModels
where id = fieldModelId)""")
# fix problems with stripping html
self.updateProgress(_("Rebuilding QA cache..."))
fields = self.s.all("select id, value from fields")
newFields = []
for (id, value) in fields:
newFields.append({'id': id, 'value': tidyHTML(value)})
self.s.statements(
"update fields set value=:value where id=:id",
newFields)
# regenerate question/answer cache
for m in self.models:
self.updateCardsFromModel(m, dirty=False)
# force a full sync
self.s.flush()
self.s.statement("update cards set modified = :t", t=time.time())
self.s.statement("update facts set modified = :t", t=time.time())
self.s.statement("update models set modified = :t", t=time.time())
self.lastSync = 0
# rebuild
self.updateProgress(_("Rebuilding types..."))
self.rebuildTypes()
# update deck and save
if not quick:
self.flushMod()
self.save()
self.refreshSession()
self.finishProgress()
if problems:
if recover:
problems.append("\n" + _("""\
Cards with corrupt or missing facts have been placed into new facts. \
Your scheduling info and card content have been preserved, but the \
original layout of the facts has been lost."""))
return "\n".join(problems)
return "ok"
def optimize(self):
oldSize = os.stat(self.path)[stat.ST_SIZE]
self.s.commit()
self.s.statement("vacuum")
self.s.statement("analyze")
newSize = os.stat(self.path)[stat.ST_SIZE]
return oldSize - newSize
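# A small usage sketch for the method above (variable name illustrative):
#   saved = deck.optimize()  # vacuums and analyzes; returns bytes reclaimed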
# Undo/redo
##########################################################################
def initUndo(self):
# note this code ignores 'unique', as it's an sqlite reserved word
self.undoStack = []
self.redoStack = []
self.undoEnabled = True
self.s.statement(
"create temporary table undoLog (seq integer primary key not null, sql text)")
tables = self.s.column0(
"select name from sqlite_master where type = 'table'")
for table in tables:
if table in ("undoLog", "sqlite_stat1"):
continue
columns = [r[1] for r in
self.s.all("pragma table_info(%s)" % table)]
# insert
self.s.statement("""
create temp trigger _undo_%(t)s_it
after insert on %(t)s begin
insert into undoLog values
(null, 'delete from %(t)s where rowid = ' || new.rowid); end""" % {'t': table})
# update
sql = """
create temp trigger _undo_%(t)s_ut
after update on %(t)s begin
insert into undoLog values (null, 'update %(t)s """ % {'t': table}
sep = "set "
for c in columns:
if c == "unique":
continue
sql += "%(s)s%(c)s=' || quote(old.%(c)s) || '" % {
's': sep, 'c': c}
sep = ","
sql += " where rowid = ' || old.rowid); end"
self.s.statement(sql)
# delete
sql = """
create temp trigger _undo_%(t)s_dt
before delete on %(t)s begin
insert into undoLog values (null, 'insert into %(t)s (rowid""" % {'t': table}
for c in columns:
sql += ",\"%s\"" % c
sql += ") values (' || old.rowid ||'"
for c in columns:
if c == "unique":
sql += ",1"
continue
sql += ",' || quote(old.%s) ||'" % c
sql += ")'); end"
self.s.statement(sql)
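# For illustration (not executed): given a hypothetical table t(a, b), the
# loop above generates triggers roughly like:
#   create temp trigger _undo_t_it after insert on t begin
#     insert into undoLog values
#       (null, 'delete from t where rowid = ' || new.rowid); end
#   create temp trigger _undo_t_ut after update on t begin
#     insert into undoLog values (null, 'update t set a=' || quote(old.a) ||
#       ',b=' || quote(old.b) || ' where rowid = ' || old.rowid); end
# Replaying the logged statements in reverse order undoes the change.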
def undoName(self):
for n in reversed(self.undoStack):
if n:
return n[0]
def redoName(self):
return self.redoStack[-1][0]
def undoAvailable(self):
if not self.undoEnabled:
return
for r in reversed(self.undoStack):
if r:
return True
def redoAvailable(self):
return self.undoEnabled and self.redoStack
def resetUndo(self):
try:
self.s.statement("delete from undoLog")
except:
pass
self.undoStack = []
self.redoStack = []
def setUndoBarrier(self):
if not self.undoStack or self.undoStack[-1] is not None:
self.undoStack.append(None)
def setUndoStart(self, name, merge=False):
if not self.undoEnabled:
return
self.s.flush()
if merge and self.undoStack:
if self.undoStack[-1] and self.undoStack[-1][0] == name:
# merge with last entry?
return
start = self._latestUndoRow()
self.undoStack.append([name, start, None])
def setUndoEnd(self, name):
if not self.undoEnabled:
return
self.s.flush()
end = self._latestUndoRow()
while self.undoStack[-1] is None:
# strip off barrier
self.undoStack.pop()
self.undoStack[-1][2] = end
if self.undoStack[-1][1] == self.undoStack[-1][2]:
self.undoStack.pop()
else:
self.redoStack = []
runHook("undoEnd")
def _latestUndoRow(self):
return self.s.scalar("select max(rowid) from undoLog") or 0
def _undoredo(self, src, dst):
self.s.flush()
while 1:
u = src.pop()
if u:
break
(start, end) = (u[1], u[2])
if end is None:
end = self._latestUndoRow()
sql = self.s.column0("""
select sql from undoLog where
seq > :s and seq <= :e order by seq desc""", s=start, e=end)
mod = len(sql) / 35
if mod:
self.startProgress(36)
self.updateProgress(_("Processing..."))
newstart = self._latestUndoRow()
for c, s in enumerate(sql):
if mod and not c % mod:
self.updateProgress()
self.engine.execute(s)
newend = self._latestUndoRow()
dst.append([u[0], newstart, newend])
if mod:
self.finishProgress()
def undo(self):
"Undo the last action(s). Caller must .reset()"
self._undoredo(self.undoStack, self.redoStack)
self.refreshSession()
runHook("postUndoRedo")
def redo(self):
"Redo the last action(s). Caller must .reset()"
self._undoredo(self.redoStack, self.undoStack)
self.refreshSession()
runHook("postUndoRedo")
# Dynamic indices
##########################################################################
def updateDynamicIndices(self):
indices = {
'intervalDesc':
'(type, priority desc, interval desc, factId, combinedDue)',
'intervalAsc':
'(type, priority desc, interval, factId, combinedDue)',
'randomOrder':
'(type, priority desc, factId, ordinal, combinedDue)',
'dueAsc':
'(type, priority desc, due, factId, combinedDue)',
'dueDesc':
'(type, priority desc, due desc, factId, combinedDue)',
}
# determine required
required = []
if self.revCardOrder == REV_CARDS_OLD_FIRST:
required.append("intervalDesc")
if self.revCardOrder == REV_CARDS_NEW_FIRST:
required.append("intervalAsc")
if self.revCardOrder == REV_CARDS_RANDOM:
required.append("randomOrder")
if (self.revCardOrder == REV_CARDS_DUE_FIRST or
self.newCardOrder == NEW_CARDS_OLD_FIRST or
self.newCardOrder == NEW_CARDS_RANDOM):
required.append("dueAsc")
if (self.newCardOrder == NEW_CARDS_NEW_FIRST):
required.append("dueDesc")
# add/delete
analyze = False
for (k, v) in indices.items():
n = "ix_cards_%s2" % k
if k in required:
if not self.s.scalar(
"select 1 from sqlite_master where name = :n", n=n):
self.s.statement(
"create index %s on cards %s" %
(n, v))
analyze = True
else:
# leave old indices for older clients
#self.s.statement("drop index if exists ix_cards_%s" % k)
self.s.statement("drop index if exists %s" % n)
if analyze:
self.s.statement("analyze")
# Shared decks
##########################################################################
sourcesTable = Table(
'sources', metadata,
Column('id', Integer, nullable=False, primary_key=True),
Column('name', UnicodeText, nullable=False, default=u""),
Column('created', Float, nullable=False, default=time.time),
Column('lastSync', Float, nullable=False, default=0),
# -1 = never check, 0 = always check, 1+ = number of seconds passed.
# not currently exposed in the GUI
Column('syncPeriod', Integer, nullable=False, default=0))
# Maps
##########################################################################
mapper(Deck, decksTable, properties={
'currentModel': relation(oldanki.models.Model, primaryjoin=
decksTable.c.currentModelId ==
oldanki.models.modelsTable.c.id),
'models': relation(oldanki.models.Model, post_update=True,
primaryjoin=
decksTable.c.id ==
oldanki.models.modelsTable.c.deckId),
})
# Deck storage
##########################################################################
numBackups = 30
backupDir = os.path.expanduser("~/.oldanki/backups")
class DeckStorage(object):
def Deck(path=None, backup=True, lock=True, pool=True, rebuild=True):
"Create a new deck or attach to an existing one."
create = True
if path is None:
sqlpath = None
else:
path = os.path.abspath(path)
# check if we need to init
if os.path.exists(path):
create = False
# sqlite needs utf8
sqlpath = path.encode("utf-8")
try:
(engine, session) = DeckStorage._attach(sqlpath, create, pool)
s = session()
if create:
ver = 999 # sentinel: newly created decks skip the upgrade paths below
metadata.create_all(engine)
deck = DeckStorage._init(s)
else:
ver = s.scalar("select version from decks limit 1")
if ver < 19:
for st in (
"decks add column newCardsPerDay integer not null default 20",
"decks add column sessionRepLimit integer not null default 100",
"decks add column sessionTimeLimit integer not null default 1800",
"decks add column utcOffset numeric(10, 2) not null default 0",
"decks add column cardCount integer not null default 0",
"decks add column factCount integer not null default 0",
"decks add column failedNowCount integer not null default 0",
"decks add column failedSoonCount integer not null default 0",
"decks add column revCount integer not null default 0",
"decks add column newCount integer not null default 0",
"decks add column revCardOrder integer not null default 0",
"cardModels add column allowEmptyAnswer boolean not null default 1",
"cardModels add column typeAnswer text not null default ''"):
try:
s.execute("alter table " + st)
except:
pass
if ver < DECK_VERSION:
metadata.create_all(engine)
deck = s.query(Deck).get(1)
if not deck:
raise DeckAccessError(_("Deck missing core table"),
type="nocore")
# attach db vars
deck.path = path
deck.engine = engine
deck.Session = session
deck.needLock = lock
deck.progressHandlerCalled = 0
deck.progressHandlerEnabled = False
if pool:
try:
deck.engine.raw_connection().set_progress_handler(
deck.progressHandler, 100)
except:
print "please install pysqlite 2.4 for better progress dialogs"
deck.engine.execute("pragma locking_mode = exclusive")
deck.s = SessionHelper(s, lock=lock)
# force a write lock
deck.s.execute("update decks set modified = modified")
needUnpack = False
if deck.utcOffset in (-1, -2):
# do the rest later
needUnpack = deck.utcOffset == -1
# make sure we do this before initVars
DeckStorage._setUTCOffset(deck)
deck.created = time.time()
if ver < 27:
initTagTables(deck.s)
if create:
# new-style file format
deck.s.commit()
deck.s.execute("pragma legacy_file_format = off")
deck.s.execute("pragma default_cache_size= 20000")
deck.s.execute("vacuum")
# add views/indices
initTagTables(deck.s)
DeckStorage._addViews(deck)
DeckStorage._addIndices(deck)
deck.s.statement("analyze")
deck._initVars()
deck.updateTagPriorities()
else:
if backup:
DeckStorage.backup(deck, path)
deck._initVars()
try:
deck = DeckStorage._upgradeDeck(deck, path)
except:
traceback.print_exc()
deck.fixIntegrity()
deck = DeckStorage._upgradeDeck(deck, path)
except OperationalError, e:
engine.dispose()
if (str(e.orig).startswith("database table is locked") or
str(e.orig).startswith("database is locked")):
raise DeckAccessError(_("File is in use by another process"),
type="inuse")
else:
raise e
if not rebuild:
# minimal startup
deck._globalStats = globalStats(deck)
deck._dailyStats = dailyStats(deck)
return deck
if needUnpack:
deck.startProgress()
DeckStorage._addIndices(deck)
for m in deck.models:
deck.updateCardsFromModel(m)
deck.finishProgress()
oldMod = deck.modified
# fix a bug with current model being unset
if not deck.currentModel and deck.models:
deck.currentModel = deck.models[0]
# ensure the necessary indices are available
deck.updateDynamicIndices()
# FIXME: temporary code for upgrade
# - ensure cards suspended on older clients are recognized
deck.s.statement("""
update cards set type = type - 3 where type between 0 and 2 and priority = -3""")
# - new delay1 handling
if deck.delay1 > 7:
deck.delay1 = 0
# unsuspend buried/rev early - can remove priorities in the future
ids = deck.s.column0(
"select id from cards where type > 2 or priority between -2 and -1")
if ids:
deck.updatePriorities(ids)
deck.s.statement(
"update cards set type = relativeDelay where type > 2")
deck.s.commit()
# check if deck has been moved, and disable syncing
deck.checkSyncHash()
# determine starting factor for new cards
deck.averageFactor = (deck.s.scalar(
"select avg(factor) from cards where type = 1")
or Deck.initialFactor)
deck.averageFactor = max(deck.averageFactor, Deck.minimumAverage)
# rebuild queue
deck.reset()
# make sure we haven't accidentally bumped the modification time
assert deck.modified == oldMod
return deck
Deck = staticmethod(Deck)
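# A minimal usage sketch (the path is illustrative):
#   deck = DeckStorage.Deck("/home/user/decks/japanese.oldanki")
#   ... read or modify the deck ...
#   deck.save()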
def _attach(path, create, pool=True):
"Attach to a file, initializing DB"
if path is None:
path = "sqlite://"
else:
path = "sqlite:///" + path
if pool:
# open and lock connection for single use
from sqlalchemy.pool import SingletonThreadPool
# temporary tables are effectively useless with the default
# settings in 0.7, so we need to force the pool class
engine = create_engine(path, connect_args={'timeout': 0},
poolclass=SingletonThreadPool)
else:
# no pool & concurrent access w/ timeout
engine = create_engine(path,
poolclass=NullPool,
connect_args={'timeout': 60})
session = sessionmaker(bind=engine,
autoflush=False,
autocommit=True)
return (engine, session)
_attach = staticmethod(_attach)
def _init(s):
"Add a new deck to the database. Return saved deck."
deck = Deck()
if sqlalchemy.__version__.startswith("0.4."):
s.save(deck)
else:
s.add(deck)
s.flush()
return deck
_init = staticmethod(_init)
def _addIndices(deck):
"Add indices to the DB."
# counts, failed cards
deck.s.statement("""
create index if not exists ix_cards_typeCombined on cards
(type, combinedDue, factId)""")
# scheduler-agnostic type
deck.s.statement("""
create index if not exists ix_cards_relativeDelay on cards
(relativeDelay)""")
# index on modified, to speed up sync summaries
deck.s.statement("""
create index if not exists ix_cards_modified on cards
(modified)""")
deck.s.statement("""
create index if not exists ix_facts_modified on facts
(modified)""")
# priority - temporary index to make compat code faster. this can be
# removed when all clients are on 1.2, as can the ones below
deck.s.statement("""
create index if not exists ix_cards_priority on cards
(priority)""")
# average factor
deck.s.statement("""
create index if not exists ix_cards_factor on cards
(type, factor)""")
# card spacing
deck.s.statement("""
create index if not exists ix_cards_factId on cards (factId)""")
# stats
deck.s.statement("""
create index if not exists ix_stats_typeDay on stats (type, day)""")
# fields
deck.s.statement("""
create index if not exists ix_fields_factId on fields (factId)""")
deck.s.statement("""
create index if not exists ix_fields_fieldModelId on fields (fieldModelId)""")
deck.s.statement("""
create index if not exists ix_fields_value on fields (value)""")
# media
deck.s.statement("""
create unique index if not exists ix_media_filename on media (filename)""")
deck.s.statement("""
create index if not exists ix_media_originalPath on media (originalPath)""")
# deletion tracking
deck.s.statement("""
create index if not exists ix_cardsDeleted_cardId on cardsDeleted (cardId)""")
deck.s.statement("""
create index if not exists ix_modelsDeleted_modelId on modelsDeleted (modelId)""")
deck.s.statement("""
create index if not exists ix_factsDeleted_factId on factsDeleted (factId)""")
deck.s.statement("""
create index if not exists ix_mediaDeleted_factId on mediaDeleted (mediaId)""")
# tags
txt = "create unique index if not exists ix_tags_tag on tags (tag)"
try:
deck.s.statement(txt)
except:
deck.s.statement("""
delete from tags where exists (select 1 from tags t2 where tags.tag = t2.tag
and tags.rowid > t2.rowid)""")
deck.s.statement(txt)
deck.s.statement("""
create index if not exists ix_cardTags_tagCard on cardTags (tagId, cardId)""")
deck.s.statement("""
create index if not exists ix_cardTags_cardId on cardTags (cardId)""")
_addIndices = staticmethod(_addIndices)
def _addViews(deck):
"Add latest version of SQL views to DB."
s = deck.s
# old views
s.statement("drop view if exists failedCards")
s.statement("drop view if exists revCardsOld")
s.statement("drop view if exists revCardsNew")
s.statement("drop view if exists revCardsDue")
s.statement("drop view if exists revCardsRandom")
s.statement("drop view if exists acqCardsRandom")
s.statement("drop view if exists acqCardsOld")
s.statement("drop view if exists acqCardsNew")
# failed cards
s.statement("""
create view failedCards as
select * from cards
where type = 0 and isDue = 1
order by type, isDue, combinedDue
""")
# rev cards
s.statement("""
create view revCardsOld as
select * from cards
where type = 1 and isDue = 1
order by priority desc, interval desc""")
s.statement("""
create view revCardsNew as
select * from cards
where type = 1 and isDue = 1
order by priority desc, interval""")
s.statement("""
create view revCardsDue as
select * from cards
where type = 1 and isDue = 1
order by priority desc, due""")
s.statement("""
create view revCardsRandom as
select * from cards
where type = 1 and isDue = 1
order by priority desc, factId, ordinal""")
# new cards
s.statement("""
create view acqCardsOld as
select * from cards
where type = 2 and isDue = 1
order by priority desc, due""")
s.statement("""
create view acqCardsNew as
select * from cards
where type = 2 and isDue = 1
order by priority desc, due desc""")
_addViews = staticmethod(_addViews)
def _upgradeDeck(deck, path):
"Upgrade deck to the latest version."
if deck.version < DECK_VERSION:
prog = True
deck.startProgress()
deck.updateProgress(_("Upgrading Deck..."))
if deck.utcOffset == -1:
# we're opening a shared deck with no indices - we'll need
# them if we want to rebuild the queue
DeckStorage._addIndices(deck)
oldmod = deck.modified
else:
prog = False
deck.path = path
if deck.version == 0:
# new columns
try:
deck.s.statement("""
alter table cards add column spaceUntil float not null default 0""")
deck.s.statement("""
alter table cards add column relativeDelay float not null default 0.0""")
deck.s.statement("""
alter table cards add column isDue boolean not null default 0""")
deck.s.statement("""
alter table cards add column type integer not null default 0""")
deck.s.statement("""
alter table cards add column combinedDue float not null default 0""")
# update cards.spaceUntil based on old facts
deck.s.statement("""
update cards
set spaceUntil = (select (case
when cards.id = facts.lastCardId
then 0
else facts.spaceUntil
end) from cards as c, facts
where c.factId = facts.id
and cards.id = c.id)""")
deck.s.statement("""
update cards
set combinedDue = max(due, spaceUntil)
""")
except:
print "failed to upgrade"
# rebuild with new file format
deck.s.commit()
deck.s.execute("pragma legacy_file_format = off")
deck.s.execute("vacuum")
# add views/indices
DeckStorage._addViews(deck)
DeckStorage._addIndices(deck)
# rebuild type and delay cache
deck.rebuildTypes()
deck.reset()
# bump version
deck.version = 1
# optimize indices
deck.s.statement("analyze")
if deck.version == 1:
# fix indexes and views
deck.s.statement("drop index if exists ix_cards_newRandomOrder")
deck.s.statement("drop index if exists ix_cards_newOrderedOrder")
DeckStorage._addViews(deck)
DeckStorage._addIndices(deck)
deck.rebuildTypes()
# optimize indices
deck.s.statement("analyze")
deck.version = 2
if deck.version == 2:
# compensate for bug in 0.9.7 by rebuilding isDue and priorities
deck.s.statement("update cards set isDue = 0")
deck.updateAllPriorities(dirty=False)
# compensate for bug in early 0.9.x where fieldId was not unique
deck.s.statement("update fields set id = random()")
deck.version = 3
if deck.version == 3:
# remove conflicting and unused indexes
deck.s.statement("drop index if exists ix_cards_isDueCombined")
deck.s.statement("drop index if exists ix_facts_lastCardId")
deck.s.statement("drop index if exists ix_cards_successive")
deck.s.statement("drop index if exists ix_cards_priority")
deck.s.statement("drop index if exists ix_cards_reps")
deck.s.statement("drop index if exists ix_cards_due")
deck.s.statement("drop index if exists ix_stats_type")
deck.s.statement("drop index if exists ix_stats_day")
deck.s.statement("drop index if exists ix_factsDeleted_cardId")
deck.s.statement("drop index if exists ix_modelsDeleted_cardId")
DeckStorage._addIndices(deck)
deck.s.statement("analyze")
deck.version = 4
if deck.version == 4:
# decks field upgraded earlier
deck.version = 5
if deck.version == 5:
# new spacing
deck.newCardSpacing = NEW_CARDS_DISTRIBUTE
deck.version = 6
# low priority cards now stay in same queue
deck.rebuildTypes()
if deck.version == 6:
# removed 'new cards first' option, so order has changed
deck.newCardSpacing = NEW_CARDS_DISTRIBUTE
deck.version = 7
# <version 7->8 upgrade code removed as obsolete>
if deck.version < 9:
# backup media
media = deck.s.all("""
select filename, size, created, originalPath, description from media""")
# fix mediaDeleted definition
deck.s.execute("drop table mediaDeleted")
deck.s.execute("drop table media")
metadata.create_all(deck.engine)
# restore
h = []
for row in media:
h.append({
'id': genID(),
'filename': row[0],
'size': row[1],
'created': row[2],
'originalPath': row[3],
'description': row[4]})
if h:
deck.s.statements("""
insert into media values (
:id, :filename, :size, :created, :originalPath, :description)""", h)
deck.version = 9
if deck.version < 10:
deck.s.statement("""
alter table models add column source integer not null default 0""")
deck.version = 10
if deck.version < 11:
DeckStorage._setUTCOffset(deck)
deck.version = 11
deck.s.commit()
if deck.version < 12:
deck.s.statement("drop index if exists ix_cards_revisionOrder")
deck.s.statement("drop index if exists ix_cards_newRandomOrder")
deck.s.statement("drop index if exists ix_cards_newOrderedOrder")
deck.s.statement("drop index if exists ix_cards_markExpired")
deck.s.statement("drop index if exists ix_cards_failedIsDue")
deck.s.statement("drop index if exists ix_cards_failedOrder")
deck.s.statement("drop index if exists ix_cards_type")
deck.s.statement("drop index if exists ix_cards_priority")
DeckStorage._addViews(deck)
DeckStorage._addIndices(deck)
deck.s.statement("analyze")
if deck.version < 13:
deck.reset()
deck.rebuildCounts()
# regenerate question/answer cache
for m in deck.models:
deck.updateCardsFromModel(m, dirty=False)
deck.version = 13
if deck.version < 14:
deck.s.statement("""
update cards set interval = 0
where interval < 1""")
deck.version = 14
if deck.version < 15:
deck.delay1 = deck.delay0
deck.delay2 = 0.0
deck.version = 15
if deck.version < 16:
deck.version = 16
if deck.version < 17:
deck.s.statement("drop view if exists acqCards")
deck.s.statement("drop view if exists futureCards")
deck.s.statement("drop view if exists revCards")
deck.s.statement("drop view if exists typedCards")
deck.s.statement("drop view if exists failedCardsNow")
deck.s.statement("drop view if exists failedCardsSoon")
deck.s.statement("drop index if exists ix_cards_revisionOrder")
deck.s.statement("drop index if exists ix_cards_newRandomOrder")
deck.s.statement("drop index if exists ix_cards_newOrderedOrder")
deck.s.statement("drop index if exists ix_cards_combinedDue")
# add new views
DeckStorage._addViews(deck)
DeckStorage._addIndices(deck)
deck.version = 17
if deck.version < 18:
deck.s.statement(
"create table undoLog (seq integer primary key, sql text)")
deck.version = 18
deck.s.commit()
DeckStorage._addIndices(deck)
deck.s.statement("analyze")
if deck.version < 19:
# permanent undo log causes various problems, revert to temp
deck.s.statement("drop table undoLog")
deck.sessionTimeLimit = 600
deck.sessionRepLimit = 0
deck.version = 19
deck.s.commit()
if deck.version < 20:
DeckStorage._addViews(deck)
DeckStorage._addIndices(deck)
deck.version = 20
deck.s.commit()
if deck.version < 21:
deck.s.statement("vacuum")
deck.s.statement("analyze")
deck.version = 21
deck.s.commit()
if deck.version < 22:
deck.s.statement(
'update cardModels set typeAnswer = ""')
deck.version = 22
deck.s.commit()
if deck.version < 23:
try:
deck.s.execute("drop table undoLog")
except:
pass
deck.version = 23
deck.s.commit()
if deck.version < 24:
deck.s.statement(
"update cardModels set lastFontColour = '#ffffff'")
deck.version = 24
deck.s.commit()
if deck.version < 25:
deck.s.statement("drop index if exists ix_cards_priorityDue")
deck.s.statement("drop index if exists ix_cards_priorityDueReal")
DeckStorage._addViews(deck)
DeckStorage._addIndices(deck)
deck.updateDynamicIndices()
deck.version = 25
deck.s.commit()
if deck.version < 26:
# tags are now space-separated, so spaces inside a tag are no longer allowed
def munge(tags):
tags = re.sub(", ?", "--tmp--", tags)
tags = re.sub(" - ", "-", tags)
tags = re.sub(" ", "-", tags)
tags = re.sub("--tmp--", " ", tags)
tags = canonifyTags(tags)
return tags
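# e.g. munge("vocab, chapter 1") -> canonifyTags("vocab chapter-1"):
# comma separators become spaces, embedded spaces become dashes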
rows = deck.s.all('select id, tags from facts')
d = []
for (id, tags) in rows:
d.append({
'i': id,
't': munge(tags),
})
deck.s.statements(
"update facts set tags = :t where id = :i", d)
for k in ('highPriority', 'medPriority',
'lowPriority', 'suspended'):
x = getattr(deck, k)
setattr(deck, k, munge(x))
for m in deck.models:
for cm in m.cardModels:
cm.name = munge(cm.name)
m.tags = munge(m.tags)
deck.updateCardsFromModel(m, dirty=False)
deck.version = 26
deck.s.commit()
deck.s.statement("vacuum")
if deck.version < 27:
DeckStorage._addIndices(deck)
deck.updateCardTags()
deck.updateAllPriorities(dirty=False)
deck.version = 27
deck.s.commit()
if deck.version < 28:
deck.s.statement("pragma default_cache_size= 20000")
deck.version = 28
deck.s.commit()
if deck.version < 30:
# remove duplicates from review history
deck.s.statement("""
delete from reviewHistory where id not in (
select min(id) from reviewHistory group by cardId, time);""")
deck.version = 30
deck.s.commit()
if deck.version < 31:
# recreate review history table
deck.s.statement("drop index if exists ix_reviewHistory_unique")
schema = """
CREATE TABLE %s (
cardId INTEGER NOT NULL,
time NUMERIC(10, 2) NOT NULL,
lastInterval NUMERIC(10, 2) NOT NULL,
nextInterval NUMERIC(10, 2) NOT NULL,
ease INTEGER NOT NULL,
delay NUMERIC(10, 2) NOT NULL,
lastFactor NUMERIC(10, 2) NOT NULL,
nextFactor NUMERIC(10, 2) NOT NULL,
reps NUMERIC(10, 2) NOT NULL,
thinkingTime NUMERIC(10, 2) NOT NULL,
yesCount NUMERIC(10, 2) NOT NULL,
noCount NUMERIC(10, 2) NOT NULL,
PRIMARY KEY (cardId, time))"""
deck.s.statement(schema % "revtmp")
deck.s.statement("""
insert into revtmp
select cardId, time, lastInterval, nextInterval, ease, delay, lastFactor,
nextFactor, reps, thinkingTime, yesCount, noCount from reviewHistory""")
deck.s.statement("drop table reviewHistory")
metadata.create_all(deck.engine)
deck.s.statement(
"insert into reviewHistory select * from revtmp")
deck.s.statement("drop table revtmp")
deck.version = 31
deck.s.commit()
deck.s.statement("vacuum")
if deck.version < 32:
deck.s.execute("drop index if exists ix_cardTags_tagId")
deck.s.execute("drop index if exists ix_cardTags_cardId")
DeckStorage._addIndices(deck)
deck.s.execute("analyze")
deck.version = 32
deck.s.commit()
if deck.version < 33:
deck.s.execute("drop index if exists ix_tags_tag")
DeckStorage._addIndices(deck)
deck.version = 33
deck.s.commit()
if deck.version < 34:
deck.s.execute("drop view if exists acqCardsRandom")
deck.s.execute("drop index if exists ix_cards_factId")
DeckStorage._addIndices(deck)
deck.updateDynamicIndices()
deck.version = 34
deck.s.commit()
if deck.version < 36:
deck.s.statement("drop index if exists ix_cards_priorityDue")
DeckStorage._addIndices(deck)
deck.s.execute("analyze")
deck.version = 36
deck.s.commit()
if deck.version < 37:
if deck.getFailedCardPolicy() == 1:
deck.failedCardMax = 0
deck.version = 37
deck.s.commit()
if deck.version < 39:
deck.reset()
# manually suspend all suspended cards
ids = deck.findCards("tag:suspended")
if ids:
# unrolled from suspendCards() to avoid marking dirty
deck.s.statement(
"update cards set isDue=0, priority=-3 "
"where id in %s" % ids2str(ids))
deck.rebuildCounts()
# suspended tag obsolete - don't do this yet
deck.suspended = re.sub(u" ?Suspended ?", u"", deck.suspended)
deck.updateTagPriorities()
deck.version = 39
deck.s.commit()
if deck.version < 40:
# now stores media url
deck.s.statement("update models set features = ''")
deck.version = 40
deck.s.commit()
if deck.version < 43:
deck.s.statement("update fieldModels set features = ''")
deck.version = 43
deck.s.commit()
if deck.version < 44:
# leaner indices
deck.s.statement("drop index if exists ix_cards_factId")
deck.version = 44
deck.s.commit()
if deck.version < 48:
deck.updateFieldCache(deck.s.column0("select id from facts"))
deck.version = 48
deck.s.commit()
if deck.version < 50:
# more new type handling
deck.rebuildTypes()
deck.version = 50
deck.s.commit()
if deck.version < 52:
dname = deck.name()
sname = deck.syncName
if sname and dname != sname:
deck.notify(_("""\
When syncing, Anki now uses the same deck name on the server as the deck \
name on your computer. Because you had '%(dname)s' set to sync to \
'%(sname)s' on the server, syncing has been temporarily disabled.
If you want to keep your changes to the online version, please use \
File>Download>Personal Deck to download the online version.
If you want to keep the version on your computer, please enable \
syncing again via Settings>Deck Properties>Synchronisation.
If you have syncing disabled in the preferences, you can ignore \
this message. (ERR-0101)""") % {
'sname':sname, 'dname':dname})
deck.disableSyncing()
elif sname:
deck.enableSyncing()
deck.version = 52
deck.s.commit()
if deck.version < 53:
if deck.getBool("perDay"):
if deck.hardIntervalMin == 0.333:
deck.hardIntervalMin = max(1.0, deck.hardIntervalMin)
deck.hardIntervalMax = max(1.1, deck.hardIntervalMax)
deck.version = 53
deck.s.commit()
if deck.version < 54:
# broken versions of the DB orm die if this is a bool with a
# non-int value
deck.s.statement("update fieldModels set editFontFamily = 1");
deck.version = 54
deck.s.commit()
if deck.version < 57:
deck.version = 57
deck.s.commit()
if deck.version < 61:
# do our best to upgrade templates to the new style
txt = '''\
<span style="font-family: %s; font-size: %spx; color: %s; white-space: pre-wrap;">%s</span>'''
for m in deck.models:
unstyled = []
for fm in m.fieldModels:
# find which fields had explicit formatting
if fm.quizFontFamily or fm.quizFontSize or fm.quizFontColour:
pass
else:
unstyled.append(fm.name)
# fill out missing info
fm.quizFontFamily = fm.quizFontFamily or u"Arial"
fm.quizFontSize = fm.quizFontSize or 20
fm.quizFontColour = fm.quizFontColour or "#000000"
fm.editFontSize = fm.editFontSize or 20
unstyled = set(unstyled)
for cm in m.cardModels:
# embed the old font information into card templates
cm.qformat = txt % (
cm.questionFontFamily,
cm.questionFontSize,
cm.questionFontColour,
cm.qformat)
cm.aformat = txt % (
cm.answerFontFamily,
cm.answerFontSize,
cm.answerFontColour,
cm.aformat)
# escape fields that had no previous styling
for un in unstyled:
cm.qformat = cm.qformat.replace("%("+un+")s", "{{{%s}}}"%un)
cm.aformat = cm.aformat.replace("%("+un+")s", "{{{%s}}}"%un)
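# e.g. a question format "%(Front)s" with Front unstyled becomes roughly
# '<span style="font-family: ...; font-size: ...px; ...">{{{Front}}}</span>'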
# rebuild q/a for the above & because latex has changed
for m in deck.models:
deck.updateCardsFromModel(m, dirty=False)
# rebuild the media db based on new format
deck.version = 61
deck.s.commit()
if deck.version < 62:
# updated indices
for d in ("intervalDesc", "intervalAsc", "randomOrder",
"dueAsc", "dueDesc"):
deck.s.statement("drop index if exists ix_cards_%s2" % d)
deck.s.statement("drop index if exists ix_cards_typeCombined")
DeckStorage._addIndices(deck)
deck.updateDynamicIndices()
deck.s.execute("vacuum")
deck.version = 62
deck.s.commit()
if deck.version < 64:
# remove old static indices, as all clients should be libanki1.2+
for d in ("ix_cards_duePriority",
"ix_cards_priorityDue"):
deck.s.statement("drop index if exists %s" % d)
# remove old dynamic indices
for d in ("intervalDesc", "intervalAsc", "randomOrder",
"dueAsc", "dueDesc"):
deck.s.statement("drop index if exists ix_cards_%s" % d)
deck.s.execute("analyze")
deck.version = 64
deck.s.commit()
# note: we keep the priority index for now
if deck.version < 65:
# we weren't correctly setting relativeDelay when answering cards
# in previous versions, so ensure everything is set correctly
deck.rebuildTypes()
deck.version = 65
deck.s.commit()
# executing a pragma here is very slow on large decks, so we store
# our own record
if deck.getInt("pageSize") != 4096:
deck.s.commit()
deck.s.execute("pragma page_size = 4096")
deck.s.execute("pragma legacy_file_format = 0")
deck.s.execute("vacuum")
deck.setVar("pageSize", 4096, mod=False)
deck.s.commit()
if prog:
assert deck.modified == oldmod
deck.finishProgress()
return deck
_upgradeDeck = staticmethod(_upgradeDeck)
def _setUTCOffset(deck):
# day rollover happens at 4am local time
deck.utcOffset = time.timezone + 60*60*4
_setUTCOffset = staticmethod(_setUTCOffset)
def backup(deck, path):
"""Path must not be unicode."""
if not numBackups:
return
def escape(path):
path = os.path.abspath(path)
path = path.replace("\\", "!")
path = path.replace("/", "!")
path = path.replace(":", "")
return path
escp = escape(path)
# make sure backup dir exists
try:
os.makedirs(backupDir)
except (OSError, IOError):
pass
# find existing backups
gen = re.sub(r"\.oldanki$", r".backup-(\d+).oldanki", re.escape(escp))
backups = []
for file in os.listdir(backupDir):
m = re.match(gen, file)
if m:
backups.append((int(m.group(1)), file))
backups.sort()
# check if last backup is the same
if backups:
latest = os.path.join(backupDir, backups[-1][1])
if int(deck.modified) == int(
os.stat(latest)[stat.ST_MTIME]):
return
# check integrity
if deck.s.scalar("pragma integrity_check") != "ok":
raise DeckAccessError(_("Deck is corrupt."), type="corrupt")
# get next num
if not backups:
n = 1
else:
n = backups[-1][0] + 1
# do backup
newpath = os.path.join(backupDir, os.path.basename(
re.sub("\.oldanki$", ".backup-%s.oldanki" % n, escp)))
shutil.copy2(path, newpath)
# set mtimes to be identical
if deck.modified:
os.utime(newpath, (deck.modified, deck.modified))
# remove if over
if len(backups) + 1 > numBackups:
delete = len(backups) + 1 - numBackups
delete = backups[:delete]
for file in delete:
os.unlink(os.path.join(backupDir, file[1]))
backup = staticmethod(backup)
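# For illustration: backing up /home/user/jp.oldanki creates files named
# "!home!user!jp.backup-N.oldanki" in backupDir, with N counting upwards.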
def newCardOrderLabels():
return {
0: _("Show new cards in random order"),
1: _("Show new cards in order added"),
2: _("Show new cards in reverse order added"),
}
def newCardSchedulingLabels():
return {
0: _("Spread new cards out through reviews"),
1: _("Show new cards after all other cards"),
2: _("Show new cards before reviews"),
}
def revCardOrderLabels():
return {
0: _("Review cards from largest interval"),
1: _("Review cards from smallest interval"),
2: _("Review cards in order due"),
3: _("Review cards in random order"),
}
def failedCardOptionLabels():
return {
0: _("Show failed cards soon"),
1: _("Show failed cards at end"),
2: _("Show failed cards in 10 minutes"),
3: _("Show failed cards in 8 hours"),
4: _("Show failed cards in 3 days"),
5: _("Custom failed cards handling"),
}
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/deck.py
|
deck.py
|
from oldanki.template import Template
import os.path
import re
class View(object):
# Path where this view's template(s) live
template_path = '.'
# Extension for templates
template_extension = 'mustache'
# The name of this template. If none is given the View will try
# to infer it based on the class name.
template_name = None
# Absolute path to the template itself. Pystache will try to guess
# if it's not provided.
template_file = None
# Contents of the template.
template = None
# Character encoding of the template file. If None, Pystache will not
# do any decoding of the template.
template_encoding = None
def __init__(self, template=None, context=None, **kwargs):
self.template = template
self.context = context or {}
# If the context we're handed is a View, we want to inherit
# its settings.
if isinstance(context, View):
self.inherit_settings(context)
if kwargs:
self.context.update(kwargs)
def inherit_settings(self, view):
"""Given another View, copies its settings."""
if view.template_path:
self.template_path = view.template_path
if view.template_name:
self.template_name = view.template_name
def load_template(self):
if self.template:
return self.template
if self.template_file:
return self._load_template()
name = self.get_template_name() + '.' + self.template_extension
if isinstance(self.template_path, basestring):
self.template_file = os.path.join(self.template_path, name)
return self._load_template()
for path in self.template_path:
self.template_file = os.path.join(path, name)
if os.path.exists(self.template_file):
return self._load_template()
raise IOError('"%s" not found in "%s"' % (name, ':'.join(self.template_path),))
def _load_template(self):
f = open(self.template_file, 'r')
try:
template = f.read()
if self.template_encoding:
template = unicode(template, self.template_encoding)
finally:
f.close()
return template
def get_template_name(self, name=None):
"""TemplatePartial => template_partial
Takes a string but defaults to using the current class' name or
the `template_name` attribute
"""
if self.template_name:
return self.template_name
if not name:
name = self.__class__.__name__
def repl(match):
return '_' + match.group(0).lower()
return re.sub('[A-Z]', repl, name)[1:]
def __contains__(self, needle):
return needle in self.context or hasattr(self, needle)
def __getitem__(self, attr):
val = self.get(attr, None)
if not val:  # note: falsy context values are treated as missing
raise KeyError("No such key.")
return val
def get(self, attr, default):
attr = self.context.get(attr, getattr(self, attr, default))
if hasattr(attr, '__call__'):
return attr()
else:
return attr
def render(self, encoding=None):
template = self.load_template()
return Template(template, self).render(encoding=encoding)
def __str__(self):
return self.render()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/template/view.py
|
view.py
|
========
Pystache
========
Inspired by ctemplate_ and et_, Mustache_ is a
framework-agnostic way to render logic-free views.
As ctemplate says, "It emphasizes separating logic from presentation:
it is impossible to embed application logic in this template language."
Pystache is a Python implementation of Mustache. Pystache requires
Python 2.6.
Documentation
=============
The different Mustache tags are documented at `mustache(5)`_.
Install It
==========
::
pip install pystache
Use It
======
::
>>> import pystache
>>> pystache.render('Hi {{person}}!', {'person': 'Mom'})
'Hi Mom!'
You can also create dedicated view classes to hold your view logic.
Here's your simple.py::
import pystache
class Simple(pystache.View):
def thing(self):
return "pizza"
Then your template, simple.mustache::
Hi {{thing}}!
Pull it together::
>>> Simple().render()
'Hi pizza!'
Test It
=======
nose_ works great! ::
pip install nose
cd pystache
nosetests
Author
======
::
context = { 'author': 'Chris Wanstrath', 'email': '[email protected]' }
pystache.render("{{author}} :: {{email}}", context)
.. _ctemplate: http://code.google.com/p/google-ctemplate/
.. _et: http://www.ivan.fomichev.name/2008/05/erlang-template-engine-prototype.html
.. _Mustache: http://defunkt.github.com/mustache/
.. _mustache(5): http://defunkt.github.com/mustache/mustache.5.html
.. _nose: http://somethingaboutorange.com/mrl/projects/nose/0.11.1/testing.html
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/template/README.rst
|
README.rst
|
import re
import cgi
import collections
modifiers = {}
def modifier(symbol):
"""Decorator for associating a function with a Mustache tag modifier.
@modifier('P')
def render_tongue(self, tag_name=None, context=None):
return ":P %s" % tag_name
{{P yo }} => :P yo
"""
def set_modifier(func):
modifiers[symbol] = func
return func
return set_modifier
def get_or_attr(obj, name, default=None):
try:
return obj[name]
except KeyError:
return default
except:  # obj is not a mapping; fall back to attribute lookup
try:
return getattr(obj, name)
except AttributeError:
return default
class Template(object):
# The regular expression used to find a #section
section_re = None
# The regular expression used to find a tag.
tag_re = None
# Opening tag delimiter
otag = '{{'
# Closing tag delimiter
ctag = '}}'
def __init__(self, template, context=None):
self.template = template
self.context = context or {}
self.compile_regexps()
def render(self, template=None, context=None, encoding=None):
"""Turns a Mustache template into something wonderful."""
template = template or self.template
context = context or self.context
template = self.render_sections(template, context)
result = self.render_tags(template, context)
if encoding is not None:
result = result.encode(encoding)
return result
def compile_regexps(self):
"""Compiles our section and tag regular expressions."""
tags = { 'otag': re.escape(self.otag), 'ctag': re.escape(self.ctag) }
section = r"%(otag)s[\#|^]([^\}]*)%(ctag)s(.+?)%(otag)s/\1%(ctag)s"
self.section_re = re.compile(section % tags, re.M|re.S)
tag = r"%(otag)s(#|=|&|!|>|\{)?(.+?)\1?%(ctag)s+"
self.tag_re = re.compile(tag % tags)
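# With the default delimiters, tag_re matches e.g. {{name}} (tag_type None),
# {{{name}}} (tag_type '{'), {{!comment}}, and {{=<% %>=}} (delimiter change).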
def render_sections(self, template, context):
"""Expands sections."""
while 1:
match = self.section_re.search(template)
if match is None:
break
section, section_name, inner = match.group(0, 1, 2)
section_name = section_name.strip()
it = get_or_attr(context, section_name, None)
replacer = ''
# if it and isinstance(it, collections.Callable):
# replacer = it(inner)
if it and not hasattr(it, '__iter__'):
if section[2] != '^':
replacer = inner
elif it and hasattr(it, 'keys') and hasattr(it, '__getitem__'):
if section[2] != '^':
replacer = self.render(inner, it)
elif it:
insides = []
for item in it:
insides.append(self.render(inner, item))
replacer = ''.join(insides)
elif not it and section[2] == '^':
replacer = inner
template = template.replace(section, replacer)
return template
def render_tags(self, template, context):
"""Renders all the tags in a template for a context."""
while 1:
match = self.tag_re.search(template)
if match is None:
break
tag, tag_type, tag_name = match.group(0, 1, 2)
tag_name = tag_name.strip()
try:
func = modifiers[tag_type]
replacement = func(self, tag_name, context)
template = template.replace(tag, replacement)
except:
return u"{{invalid template}}"
return template
@modifier('{')
def render_tag(self, tag_name, context):
"""Given a tag name and context, finds, escapes, and renders the tag."""
raw = get_or_attr(context, tag_name, '')
if not raw and raw is not 0:
return ''
return re.sub("^<span.+?>(.*)</span>", "\\1", raw)
@modifier('!')
def render_comment(self, tag_name=None, context=None):
"""Rendering a comment always returns nothing."""
return ''
@modifier(None)
def render_unescaped(self, tag_name=None, context=None):
"""Render a tag without escaping it."""
return unicode(get_or_attr(context, tag_name, '{unknown field %s}' % tag_name))
# @modifier('>')
# def render_partial(self, tag_name=None, context=None):
# """Renders a partial within the current context."""
# # Import view here to avoid import loop
# from pystache.view import View
# view = View(context=context)
# view.template_name = tag_name
# return view.render()
@modifier('=')
def render_delimiter(self, tag_name=None, context=None):
"""Changes the Mustache delimiter."""
self.otag, self.ctag = tag_name.split(' ')
self.compile_regexps()
return ''
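# A minimal usage sketch:
#   Template("Hi {{name}}!", {'name': 'Mom'}).render()  # -> u'Hi Mom!'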
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/oldanki/template/template.py
|
template.py
|
# Copyright (c) 2006-2010 Hubert Pham
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
""" PyAudio : Python Bindings for PortAudio v19.
**These bindings only support PortAudio blocking mode.**
:var PaSampleFormat:
A list of all PortAudio ``PaSampleFormat`` value constants.
See: `paInt32`, `paInt24`, `paInt16`, `paInt8`, and `paUInt8`.
:var PaHostApiTypeId:
A list of all PortAudio ``PaHostApiTypeId`` constants.
See: `paInDevelopment`, `paDirectSound`, `paMME`, `paASIO`,
`paSoundManager`, `paCoreAudio`, `paOSS`, `paALSA`, `paAL`, *et al...*
:var PaErrorCode:
A list of all PortAudio ``PaErrorCode`` constants.
Typically, error code constants are included in Python
exception objects (as the second argument).
See: `paNoError`, `paNotInitialized`, `paUnanticipatedHostError`,
*et al...*
:group PortAudio Constants:
PaSampleFormat, PaHostApiTypeId, PaErrorCode
:group PaSampleFormat Values:
paFloat32, paInt32, paInt24, paInt16,
paInt8, paUInt8, paCustomFormat
:group PaHostApiTypeId Values:
paInDevelopment, paDirectSound, paMME, paASIO,
paSoundManager, paCoreAudio, paOSS, paALSA
paAL, paBeOS, paWDMKS, paJACK, paWASAPI, paNoDevice
:group PaErrorCode Values:
paNoError,
paNotInitialized, paUnanticipatedHostError,
paInvalidChannelCount, paInvalidSampleRate,
paInvalidDevice, paInvalidFlag,
paSampleFormatNotSupported, paBadIODeviceCombination,
paInsufficientMemory, paBufferTooBig,
paBufferTooSmall, paNullCallback,
paBadStreamPtr, paTimedOut,
paInternalError, paDeviceUnavailable,
paIncompatibleHostApiSpecificStreamInfo, paStreamIsStopped,
paStreamIsNotStopped, paInputOverflowed,
paOutputUnderflowed, paHostApiNotFound,
paInvalidHostApi, paCanNotReadFromACallbackStream,
paCanNotWriteToACallbackStream,
paCanNotReadFromAnOutputOnlyStream,
paCanNotWriteToAnInputOnlyStream,
paIncompatibleStreamHostApi
:group Stream Conversion Convenience Functions:
get_sample_size, get_format_from_width
:group PortAudio version:
get_portaudio_version, get_portaudio_version_text
:sort: PaSampleFormat, PaHostApiTypeId, PaErrorCode
:sort: PortAudio Constants, PaSampleFormat Values,
PaHostApiTypeId Values, PaErrorCode Values
"""
__author__ = "Hubert Pham"
__version__ = "0.2.4"
__docformat__ = "restructuredtext en"
import _portaudio as pa
# Try to use Python 2.4's built in `set'
try:
a = set()
del a
except NameError:
from sets import Set as set
############################################################
# GLOBALS
############################################################
##### PaSampleFormat Sample Formats #####
paFloat32 = pa.paFloat32
paInt32 = pa.paInt32
paInt24 = pa.paInt24
paInt16 = pa.paInt16
paInt8 = pa.paInt8
paUInt8 = pa.paUInt8
paCustomFormat = pa.paCustomFormat
# group them together for epydoc
PaSampleFormat = ['paFloat32', 'paInt32', 'paInt24', 'paInt16',
'paInt8', 'paUInt8', 'paCustomFormat']
###### HostAPI TypeId #####
paInDevelopment = pa.paInDevelopment
paDirectSound = pa.paDirectSound
paMME = pa.paMME
paASIO = pa.paASIO
paSoundManager = pa.paSoundManager
paCoreAudio = pa.paCoreAudio
paOSS = pa.paOSS
paALSA = pa.paALSA
paAL = pa.paAL
paBeOS = pa.paBeOS
paWDMKS = pa.paWDMKS
paJACK = pa.paJACK
paWASAPI = pa.paWASAPI
paNoDevice = pa.paNoDevice
# group them together for epydoc
PaHostApiTypeId = ['paInDevelopment', 'paDirectSound', 'paMME',
'paASIO', 'paSoundManager', 'paCoreAudio',
'paOSS', 'paALSA', 'paAL', 'paBeOS',
'paWDMKS', 'paJACK', 'paWASAPI', 'paNoDevice']
###### portaudio error codes #####
paNoError = pa.paNoError
paNotInitialized = pa.paNotInitialized
paUnanticipatedHostError = pa.paUnanticipatedHostError
paInvalidChannelCount = pa.paInvalidChannelCount
paInvalidSampleRate = pa.paInvalidSampleRate
paInvalidDevice = pa.paInvalidDevice
paInvalidFlag = pa.paInvalidFlag
paSampleFormatNotSupported = pa.paSampleFormatNotSupported
paBadIODeviceCombination = pa.paBadIODeviceCombination
paInsufficientMemory = pa.paInsufficientMemory
paBufferTooBig = pa.paBufferTooBig
paBufferTooSmall = pa.paBufferTooSmall
paNullCallback = pa.paNullCallback
paBadStreamPtr = pa.paBadStreamPtr
paTimedOut = pa.paTimedOut
paInternalError = pa.paInternalError
paDeviceUnavailable = pa.paDeviceUnavailable
paIncompatibleHostApiSpecificStreamInfo = pa.paIncompatibleHostApiSpecificStreamInfo
paStreamIsStopped = pa.paStreamIsStopped
paStreamIsNotStopped = pa.paStreamIsNotStopped
paInputOverflowed = pa.paInputOverflowed
paOutputUnderflowed = pa.paOutputUnderflowed
paHostApiNotFound = pa.paHostApiNotFound
paInvalidHostApi = pa.paInvalidHostApi
paCanNotReadFromACallbackStream = pa.paCanNotReadFromACallbackStream
paCanNotWriteToACallbackStream = pa.paCanNotWriteToACallbackStream
paCanNotReadFromAnOutputOnlyStream = pa.paCanNotReadFromAnOutputOnlyStream
paCanNotWriteToAnInputOnlyStream = pa.paCanNotWriteToAnInputOnlyStream
paIncompatibleStreamHostApi = pa.paIncompatibleStreamHostApi
# group them together for epydoc
PaErrorCode = ['paNoError',
'paNotInitialized', 'paUnanticipatedHostError',
'paInvalidChannelCount', 'paInvalidSampleRate',
'paInvalidDevice', 'paInvalidFlag',
'paSampleFormatNotSupported', 'paBadIODeviceCombination',
'paInsufficientMemory', 'paBufferTooBig',
'paBufferTooSmall', 'paNullCallback',
'paBadStreamPtr', 'paTimedOut',
'paInternalError', 'paDeviceUnavailable',
'paIncompatibleHostApiSpecificStreamInfo', 'paStreamIsStopped',
'paStreamIsNotStopped', 'paInputOverflowed',
'paOutputUnderflowed', 'paHostApiNotFound',
'paInvalidHostApi', 'paCanNotReadFromACallbackStream',
'paCanNotWriteToACallbackStream',
'paCanNotReadFromAnOutputOnlyStream',
'paCanNotWriteToAnInputOnlyStream',
'paIncompatibleStreamHostApi']
############################################################
# Convenience Functions
############################################################
def get_sample_size(format):
"""
Returns the size (in bytes) for the specified
sample `format` (a `PaSampleFormat` constant).
:param `format`:
PortAudio sample format constant `PaSampleFormat`.
:raises ValueError: Invalid specified `format`.
:rtype: int
"""
return pa.get_sample_size(format)
def get_format_from_width(width, unsigned = True):
"""
Returns a PortAudio format constant for
the specified `width`.
:param `width`:
The desired sample width in bytes (1, 2, 3, or 4)
:param `unsigned`:
For 1 byte width, specifies signed or unsigned
format.
:raises ValueError: for invalid `width`
:rtype: `PaSampleFormat`
"""
if width == 1:
if unsigned:
return paUInt8
else:
return paInt8
elif width == 2:
return paInt16
elif width == 3:
return paInt24
elif width == 4:
return paFloat32
else:
raise ValueError, "Invalid width: %d" % width
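# e.g. get_format_from_width(2) -> paInt16; get_format_from_width(1) -> paUInt8
# (or paInt8 when unsigned=False)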
############################################################
# Versioning
############################################################
def get_portaudio_version():
"""
Returns the PortAudio version number.
:rtype: int """
return pa.get_version()
def get_portaudio_version_text():
"""
Returns PortAudio version as a text string.
:rtype: str """
return pa.get_version_text()
############################################################
# Wrapper around _portaudio Stream (Internal)
############################################################
# Note: See PyAudio class below for main export.
class Stream:
"""
PortAudio Stream Wrapper. Use `PyAudio.open` to make a new
`Stream`.
:group Opening and Closing:
__init__, close
:group Stream Info:
get_input_latency, get_output_latency, get_time, get_cpu_load
:group Stream Management:
start_stream, stop_stream, is_active, is_stopped
:group Input Output:
write, read, get_read_available, get_write_available
"""
def __init__(self,
PA_manager,
rate,
channels,
format,
input = False,
output = False,
input_device_index = None,
output_device_index = None,
frames_per_buffer = 1024,
start = True,
input_host_api_specific_stream_info = None,
output_host_api_specific_stream_info = None):
"""
Initialize a stream; this should be called by
`PyAudio.open`. A stream can either be input, output, or both.
:param `PA_manager`: A reference to the managing `PyAudio` instance
:param `rate`: Sampling rate
:param `channels`: Number of channels
:param `format`: Sampling size and format. See `PaSampleFormat`.
:param `input`: Specifies whether this is an input stream.
Defaults to False.
:param `output`: Specifies whether this is an output stream.
Defaults to False.
:param `input_device_index`: Index of Input Device to use.
Unspecified (or None) uses default device.
Ignored if `input` is False.
:param `output_device_index`:
Index of Output Device to use.
Unspecified (or None) uses the default device.
Ignored if `output` is False.
:param `frames_per_buffer`: Specifies the number of frames per buffer.
:param `start`: Start the stream running immediately.
Defaults to True. In general, there is no reason to set
this to false.
:param `input_host_api_specific_stream_info`: Specifies a host API
specific stream information data structure for input.
See `PaMacCoreStreamInfo`.
:param `output_host_api_specific_stream_info`: Specifies a host API
specific stream information data structure for output.
See `PaMacCoreStreamInfo`.
:raise ValueError: Neither input nor output
are set True.
"""
# no stupidity allowed
if not (input or output):
raise ValueError, \
"Must specify an input or output " +\
"stream."
# remember parent
self._parent = PA_manager
# remember if we are an: input, output (or both)
self._is_input = input
self._is_output = output
# are we running?
self._is_running = start
# remember some parameters
self._rate = rate
self._channels = channels
self._format = format
self._frames_per_buffer = frames_per_buffer
arguments = {
'rate' : rate,
'channels' : channels,
'format' : format,
'input' : input,
'output' : output,
'input_device_index' : input_device_index,
'output_device_index' : output_device_index,
'frames_per_buffer' : frames_per_buffer}
if input_host_api_specific_stream_info:
_l = input_host_api_specific_stream_info
arguments[
'input_host_api_specific_stream_info'
] = _l._get_host_api_stream_object()
if output_host_api_specific_stream_info:
_l = output_host_api_specific_stream_info
arguments[
'output_host_api_specific_stream_info'
] = _l._get_host_api_stream_object()
# calling pa.open returns a stream object
self._stream = pa.open(**arguments)
self._input_latency = self._stream.inputLatency
self._output_latency = self._stream.outputLatency
if self._is_running:
pa.start_stream(self._stream)
def close(self):
""" Close the stream """
pa.close(self._stream)
self._is_running = False
self._parent._remove_stream(self)
############################################################
# Stream Info
############################################################
def get_input_latency(self):
"""
Return the input latency.
:rtype: float
"""
return self._stream.inputLatency
def get_output_latency(self):
"""
Return the output latency.
:rtype: float
"""
return self._stream.outputLatency
def get_time(self):
"""
Return stream time.
:rtype: float
"""
return pa.get_stream_time(self._stream)
def get_cpu_load(self):
"""
Return the CPU load.
(Note: this is always 0.0 for the blocking API.)
:rtype: float
"""
return pa.get_stream_cpu_load(self._stream)
############################################################
# Stream Management
############################################################
def start_stream(self):
""" Start the stream. """
if self._is_running:
return
pa.start_stream(self._stream)
self._is_running = True
def stop_stream(self):
""" Stop the stream. Once the stream is stopped,
one may not call write or read. However, one may
call start_stream to resume the stream. """
if not self._is_running:
return
pa.stop_stream(self._stream)
self._is_running = False
def is_active(self):
""" Returns whether the stream is active.
:rtype: bool """
return pa.is_stream_active(self._stream)
def is_stopped(self):
""" Returns whether the stream is stopped.
:rtype: bool """
return pa.is_stream_stopped(self._stream)
############################################################
# Reading/Writing
############################################################
def write(self, frames, num_frames = None,
exception_on_underflow = False):
"""
Write samples to the stream.
:param `frames`:
The frames of data.
:param `num_frames`:
The number of frames to write.
Defaults to None, in which case this value will be
automatically computed.
:param `exception_on_underflow`:
Specifies whether an exception should be thrown
(or silently ignored) on buffer underflow. Defaults
to False for improved performance, especially on
slower platforms.
:raises IOError: if the stream is not an output stream
or if the write operation was unsuccessful.
:rtype: `None`
"""
if not self._is_output:
raise IOError("Not output stream",
paCanNotWriteToAnInputOnlyStream)
if num_frames == None:
# determine how many frames to write
width = get_sample_size(self._format)
num_frames = len(frames) / (self._channels * width)
#print len(frames), self._channels, self._width, num_frames
pa.write_stream(self._stream, frames, num_frames,
exception_on_underflow)
def read(self, num_frames):
"""
Read samples from the stream.
:param `num_frames`:
The number of frames to read.
:raises IOError: if stream is not an input stream
or if the read operation was unsuccessful.
:rtype: str
"""
if not self._is_input:
raise IOError("Not input stream",
paCanNotReadFromAnOutputOnlyStream)
return pa.read_stream(self._stream, num_frames)
def get_read_available(self):
"""
Return the number of frames that can be read
without waiting.
:rtype: int
"""
return pa.get_stream_read_available(self._stream)
def get_write_available(self):
"""
Return the number of frames that can be written
without waiting.
:rtype: int
"""
return pa.get_stream_write_available(self._stream)
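# Illustrative sketch (not part of the original PyAudio source): a
# minimal blocking capture loop using the Stream API above. The rate,
# chunk size, and mono channel count are assumptions for the example.
def _example_blocking_record(seconds=3, rate=44100, chunk=1024):
    p = PyAudio()
    stream = p.open(format=paInt16, channels=1, rate=rate,
                    input=True, frames_per_buffer=chunk)
    frames = []
    for _ in range(int(rate / chunk) * seconds):
        frames.append(stream.read(chunk))  # blocks until a chunk is ready
    stream.stop_stream()
    stream.close()
    p.terminate()
    return ''.join(frames)  # raw PCM bytes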
############################################################
# Main Export
############################################################
class PyAudio:
"""
Python interface to PortAudio. Provides methods to:
- initialize and terminate PortAudio
- open and close streams
- query and inspect the available PortAudio Host APIs
- query and inspect the available PortAudio audio
devices
Use this class to open and close streams.
:group Stream Management:
open, close
:group Host API:
get_host_api_count, get_default_host_api_info,
get_host_api_info_by_type, get_host_api_info_by_index,
get_device_info_by_host_api_device_index
:group Device API:
get_device_count, is_format_supported,
get_default_input_device_info,
get_default_output_device_info,
get_device_info_by_index
:group Stream Format Conversion:
get_sample_size, get_format_from_width
"""
############################################################
# Initialization and Termination
############################################################
def __init__(self):
""" Initialize PortAudio. """
pa.initialize()
self._streams = set()
def terminate(self):
""" Terminate PortAudio.
:attention: Be sure to call this method for every
instance of this object to release PortAudio resources.
"""
for stream in self._streams:
stream.close()
self._streams = set()
pa.terminate()
############################################################
# Stream Format
############################################################
def get_sample_size(self, format):
"""
Returns the size (in bytes) for the specified
sample `format` (a `PaSampleFormat` constant).
:param `format`:
Sample format constant (`PaSampleFormat`).
:raises ValueError: Invalid specified `format`.
:rtype: int
"""
return pa.get_sample_size(format)
def get_format_from_width(self, width, unsigned = True):
"""
Returns a PortAudio format constant for
the specified `width`.
:param `width`:
The desired sample width in bytes (1, 2, 3, or 4)
:param `unsigned`:
For 1 byte width, specifies signed or unsigned format.
:raises ValueError: for invalid `width`
:rtype: `PaSampleFormat`
"""
if width == 1:
if unsigned:
return paUInt8
else:
return paInt8
elif width == 2:
return paInt16
elif width == 3:
return paInt24
elif width == 4:
return paFloat32
else:
raise ValueError, "Invalid width: %d" % width
############################################################
# Stream Factory
############################################################
def open(self, *args, **kwargs):
"""
Open a new stream. See `Stream.__init__` for
parameter details.
:returns: `Stream` """
stream = Stream(self, *args, **kwargs)
self._streams.add(stream)
return stream
def close(self, stream):
"""
Close a stream. Typically use `Stream.close` instead.
:param `stream`:
An instance of the `Stream` object.
:raises ValueError: if stream does not exist.
"""
if stream not in self._streams:
raise ValueError, "Stream `%s' not found" % str(stream)
stream.close()
def _remove_stream(self, stream):
"""
Internal method. Removes a stream.
:param `stream`:
An instance of the `Stream` object.
"""
if stream in self._streams:
self._streams.remove(stream)
############################################################
# Host API Inspection
############################################################
def get_host_api_count(self):
"""
Return the number of PortAudio Host APIs.
:rtype: int
"""
return pa.get_host_api_count()
def get_default_host_api_info(self):
"""
Return a dictionary containing the default Host API
parameters. The keys of the dictionary mirror the data fields
of PortAudio's ``PaHostApiInfo`` structure.
:raises IOError: if no default input device available
:rtype: dict
"""
defaultHostApiIndex = pa.get_default_host_api()
return self.get_host_api_info_by_index(defaultHostApiIndex)
def get_host_api_info_by_type(self, host_api_type):
"""
Return a dictionary containing the Host API parameters for the
host API specified by the `host_api_type`. The keys of the
dictionary mirror the data fields of PortAudio's ``PaHostApiInfo``
structure.
:param `host_api_type`:
The desired Host API (`PaHostApiTypeId` constant).
:raises IOError: for invalid `host_api_type`
:rtype: dict
"""
index = pa.host_api_type_id_to_host_api_index(host_api_type)
return self.get_host_api_info_by_index(index)
def get_host_api_info_by_index(self, host_api_index):
"""
Return a dictionary containing the Host API parameters for the
host API specified by the `host_api_index`. The keys of the
dictionary mirror the data fields of PortAudio's ``PaHostApiInfo``
structure.
:param `host_api_index`: The host api index.
:raises IOError: for invalid `host_api_index`
:rtype: dict
"""
return self._make_host_api_dictionary(
host_api_index,
pa.get_host_api_info(host_api_index)
)
def get_device_info_by_host_api_device_index(self,
host_api_index,
host_api_device_index):
"""
Return a dictionary containing the Device parameters for a
given Host API's n'th device. The keys of the dictionary
mirror the data fields of PortAudio's ``PaDeviceInfo`` structure.
:param `host_api_index`:
The Host API index number.
:param `host_api_device_index`:
The *n* 'th device of the host API.
:raises IOError: for invalid indices
:rtype: dict
"""
long_method_name = pa.host_api_device_index_to_device_index
device_index = long_method_name(host_api_index,
host_api_device_index)
return self.get_device_info_by_index(device_index)
def _make_host_api_dictionary(self, index, host_api_struct):
"""
Internal method to create Host API dictionary
that mirrors PortAudio's ``PaHostApiInfo`` structure.
:rtype: dict
"""
return {'index' : index,
'structVersion' : host_api_struct.structVersion,
'type' : host_api_struct.type,
'name' : host_api_struct.name,
'deviceCount' : host_api_struct.deviceCount,
'defaultInputDevice' : host_api_struct.defaultInputDevice,
'defaultOutputDevice' : host_api_struct.defaultOutputDevice}
############################################################
# Device Inspection
############################################################
def get_device_count(self):
"""
Return the number of PortAudio devices.
:rtype: int
"""
return pa.get_device_count()
def is_format_supported(self, rate,
input_device = None,
input_channels = None,
input_format = None,
output_device = None,
output_channels = None,
output_format = None):
"""
Check to see if specified device configuration
is supported. Returns True if the configuration
is supported; throws a ValueError exception otherwise.
:param `rate`:
Specifies the desired rate (in Hz)
:param `input_device`:
The input device index. Specify `None` (default) for
half-duplex output-only streams.
:param `input_channels`:
The desired number of input channels. Ignored if
`input_device` is not specified (or `None`).
:param `input_format`:
PortAudio sample format constant defined
in this module
:param `output_device`:
The output device index. Specify `None` (default) for
half-duplex input-only streams.
:param `output_channels`:
The desired number of output channels. Ignored if
`output_device` is not specified (or `None`).
:param `output_format`:
PortAudio sample format constant (`PaSampleFormat`).
:rtype: bool
:raises ValueError: tuple containing:
(error string, PortAudio error code `PaErrorCode`).
"""
if input_device == None and output_device == None:
raise ValueError("must specify stream format for input, " +\
"output, or both", paInvalidDevice);
kwargs = {}
if input_device != None:
kwargs['input_device'] = input_device
kwargs['input_channels'] = input_channels
kwargs['input_format'] = input_format
if output_device != None:
kwargs['output_device'] = output_device
kwargs['output_channels'] = output_channels
kwargs['output_format'] = output_format
return pa.is_format_supported(rate, **kwargs)
def get_default_input_device_info(self):
"""
Return the default input Device parameters as a
dictionary. The keys of the dictionary mirror the data fields
of PortAudio's ``PaDeviceInfo`` structure.
:raises IOError: No default input device available.
:rtype: dict
"""
device_index = pa.get_default_input_device()
return self.get_device_info_by_index(device_index)
def get_default_output_device_info(self):
"""
Return the default output Device parameters as a
dictionary. The keys of the dictionary mirror the data fields
of PortAudio's ``PaDeviceInfo`` structure.
:raises IOError: No default output device available.
:rtype: dict
"""
device_index = pa.get_default_output_device()
return self.get_device_info_by_index(device_index)
def get_device_info_by_index(self, device_index):
"""
Return the Device parameters for device specified in
`device_index` as a dictionary. The keys of the dictionary
mirror the data fields of PortAudio's ``PaDeviceInfo``
structure.
:param `device_index`: The device index.
:raises IOError: Invalid `device_index`.
:rtype: dict
"""
return self._make_device_info_dictionary(
device_index,
pa.get_device_info(device_index)
)
def _make_device_info_dictionary(self, index, device_info):
"""
Internal method to create Device Info dictionary
that mirrors PortAudio's ``PaDeviceInfo`` structure.
:rtype: dict
"""
return {'index' : index,
'structVersion' : device_info.structVersion,
'name' : device_info.name,
'hostApi' : device_info.hostApi,
'maxInputChannels' : device_info.maxInputChannels,
'maxOutputChannels' : device_info.maxOutputChannels,
'defaultLowInputLatency' :
device_info.defaultLowInputLatency,
'defaultLowOutputLatency' :
device_info.defaultLowOutputLatency,
'defaultHighInputLatency' :
device_info.defaultHighInputLatency,
'defaultHighOutputLatency' :
device_info.defaultHighOutputLatency,
'defaultSampleRate' :
device_info.defaultSampleRate
}
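# Illustrative sketch (not part of the original PyAudio source):
# enumerating devices with the inspection API above. The printed
# format is an assumption for the example.
def _example_list_devices():
    p = PyAudio()
    for i in range(p.get_device_count()):
        info = p.get_device_info_by_index(i)
        print("%d: %s (in=%d, out=%d)" % (i, info['name'],
                                          info['maxInputChannels'],
                                          info['maxOutputChannels']))
    p.terminate()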
######################################################################
# Host Specific Stream Info
######################################################################
try:
paMacCoreStreamInfo = pa.paMacCoreStreamInfo
except AttributeError:
pass
else:
class PaMacCoreStreamInfo:
"""
Mac OS X-only: PaMacCoreStreamInfo is a PortAudio Host API
Specific Stream Info data structure for specifying Mac OS
X-only settings. Instantiate this class (if desired) and pass
the instance as the argument in `PyAudio.open` to parameters
``input_host_api_specific_stream_info`` or
``output_host_api_specific_stream_info``. (See `Stream.__init__`.)
:note: Mac OS X only.
:group Flags (constants):
paMacCoreChangeDeviceParameters, paMacCoreFailIfConversionRequired,
paMacCoreConversionQualityMin, paMacCoreConversionQualityMedium,
paMacCoreConversionQualityLow, paMacCoreConversionQualityHigh,
paMacCoreConversionQualityMax, paMacCorePlayNice,
paMacCorePro, paMacCoreMinimizeCPUButPlayNice, paMacCoreMinimizeCPU
:group Settings:
get_flags, get_channel_map
"""
paMacCoreChangeDeviceParameters = pa.paMacCoreChangeDeviceParameters
paMacCoreFailIfConversionRequired = pa.paMacCoreFailIfConversionRequired
paMacCoreConversionQualityMin = pa.paMacCoreConversionQualityMin
paMacCoreConversionQualityMedium = pa.paMacCoreConversionQualityMedium
paMacCoreConversionQualityLow = pa.paMacCoreConversionQualityLow
paMacCoreConversionQualityHigh = pa.paMacCoreConversionQualityHigh
paMacCoreConversionQualityMax = pa.paMacCoreConversionQualityMax
paMacCorePlayNice = pa.paMacCorePlayNice
paMacCorePro = pa.paMacCorePro
paMacCoreMinimizeCPUButPlayNice = pa.paMacCoreMinimizeCPUButPlayNice
paMacCoreMinimizeCPU = pa.paMacCoreMinimizeCPU
def __init__(self, flags = None, channel_map = None):
"""
Initialize with flags and channel_map. See PortAudio
documentation for more details on these parameters; they are
passed almost verbatim to the PortAudio library.
:param `flags`: paMacCore* flags OR'ed together.
See `PaMacCoreStreamInfo`.
:param `channel_map`: An array describing the channel mapping.
See PortAudio documentation for usage.
"""
kwargs = {"flags" : flags,
"channel_map" : channel_map}
if flags == None:
del kwargs["flags"]
if channel_map == None:
del kwargs["channel_map"]
self._paMacCoreStreamInfo = paMacCoreStreamInfo(**kwargs)
def get_flags(self):
"""
Return the flags set at instantiation.
:rtype: int
"""
return self._paMacCoreStreamInfo.flags
def get_channel_map(self):
"""
Return the channel map set at instantiation.
:rtype: tuple or None
"""
return self._paMacCoreStreamInfo.channel_map
def _get_host_api_stream_object(self):
""" Private method. """
return self._paMacCoreStreamInfo
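# Illustrative sketch (not part of the original PyAudio source): tying
# the pieces together with blocking playback of a WAV file. The path
# 'test.wav' is a placeholder.
def _example_play_wav(path='test.wav', chunk=1024):
    import wave
    wf = wave.open(path, 'rb')
    p = PyAudio()
    stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
                    channels=wf.getnchannels(),
                    rate=wf.getframerate(),
                    output=True)
    data = wf.readframes(chunk)
    while data:
        stream.write(data)  # blocks until the device accepts the chunk
        data = wf.readframes(chunk)
    stream.close()
    p.terminate()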
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/thirdparty/pyaudio.py
|
pyaudio.py
|
from __future__ import generators
__author__ = "Leonard Richardson ([email protected])"
__version__ = "3.2.1"
__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson"
__license__ = "New-style BSD"
from sgmllib import SGMLParser, SGMLParseError
import codecs
import markupbase
import re
import sgmllib
try:
from htmlentitydefs import name2codepoint
except ImportError:
name2codepoint = {}
try:
set
except NameError:
from sets import Set as set
#These hacks make Beautiful Soup able to parse XML with namespaces
sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*')
markupbase._declname_match = re.compile(r'[a-zA-Z][-_.:a-zA-Z0-9]*\s*').match
DEFAULT_OUTPUT_ENCODING = "utf-8"
def _match_css_class(str):
"""Build a RE to match the given CSS class."""
return re.compile(r"(^|.*\s)%s($|\s)" % str)
# First, the classes that represent markup elements.
class PageElement(object):
"""Contains the navigational information for some part of the page
(either a tag or a piece of text)"""
def _invert(h):
"Cheap function to invert a hash."
i = {}
for k,v in h.items():
i[v] = k
return i
XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'",
"quot" : '"',
"amp" : "&",
"lt" : "<",
"gt" : ">" }
XML_SPECIAL_CHARS_TO_ENTITIES = _invert(XML_ENTITIES_TO_SPECIAL_CHARS)
def setup(self, parent=None, previous=None):
"""Sets up the initial relations between this element and
other elements."""
self.parent = parent
self.previous = previous
self.next = None
self.previousSibling = None
self.nextSibling = None
if self.parent and self.parent.contents:
self.previousSibling = self.parent.contents[-1]
self.previousSibling.nextSibling = self
def replaceWith(self, replaceWith):
oldParent = self.parent
myIndex = self.parent.index(self)
if hasattr(replaceWith, "parent")\
and replaceWith.parent is self.parent:
# We're replacing this element with one of its siblings.
index = replaceWith.parent.index(replaceWith)
if index and index < myIndex:
# Furthermore, it comes before this element. That
# means that when we extract it, the index of this
# element will change.
myIndex = myIndex - 1
self.extract()
oldParent.insert(myIndex, replaceWith)
def replaceWithChildren(self):
myParent = self.parent
myIndex = self.parent.index(self)
self.extract()
reversedChildren = list(self.contents)
reversedChildren.reverse()
for child in reversedChildren:
myParent.insert(myIndex, child)
def extract(self):
"""Destructively rips this element out of the tree."""
if self.parent:
try:
del self.parent.contents[self.parent.index(self)]
except ValueError:
pass
#Find the two elements that would be next to each other if
#this element (and any children) hadn't been parsed. Connect
#the two.
lastChild = self._lastRecursiveChild()
nextElement = lastChild.next
if self.previous:
self.previous.next = nextElement
if nextElement:
nextElement.previous = self.previous
self.previous = None
lastChild.next = None
self.parent = None
if self.previousSibling:
self.previousSibling.nextSibling = self.nextSibling
if self.nextSibling:
self.nextSibling.previousSibling = self.previousSibling
self.previousSibling = self.nextSibling = None
return self
def _lastRecursiveChild(self):
"Finds the last element beneath this object to be parsed."
lastChild = self
while hasattr(lastChild, 'contents') and lastChild.contents:
lastChild = lastChild.contents[-1]
return lastChild
def insert(self, position, newChild):
if isinstance(newChild, basestring) \
and not isinstance(newChild, NavigableString):
newChild = NavigableString(newChild)
position = min(position, len(self.contents))
if hasattr(newChild, 'parent') and newChild.parent is not None:
# We're 'inserting' an element that's already one
# of this object's children.
if newChild.parent is self:
index = self.index(newChild)
if index > position:
# Furthermore we're moving it further down the
# list of this object's children. That means that
# when we extract this element, our target index
# will jump down one.
position = position - 1
newChild.extract()
newChild.parent = self
previousChild = None
if position == 0:
newChild.previousSibling = None
newChild.previous = self
else:
previousChild = self.contents[position-1]
newChild.previousSibling = previousChild
newChild.previousSibling.nextSibling = newChild
newChild.previous = previousChild._lastRecursiveChild()
if newChild.previous:
newChild.previous.next = newChild
newChildsLastElement = newChild._lastRecursiveChild()
if position >= len(self.contents):
newChild.nextSibling = None
parent = self
parentsNextSibling = None
while not parentsNextSibling:
parentsNextSibling = parent.nextSibling
parent = parent.parent
if not parent: # This is the last element in the document.
break
if parentsNextSibling:
newChildsLastElement.next = parentsNextSibling
else:
newChildsLastElement.next = None
else:
nextChild = self.contents[position]
newChild.nextSibling = nextChild
if newChild.nextSibling:
newChild.nextSibling.previousSibling = newChild
newChildsLastElement.next = nextChild
if newChildsLastElement.next:
newChildsLastElement.next.previous = newChildsLastElement
self.contents.insert(position, newChild)
def append(self, tag):
"""Appends the given tag to the contents of this tag."""
self.insert(len(self.contents), tag)
def findNext(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears after this Tag in the document."""
return self._findOne(self.findAllNext, name, attrs, text, **kwargs)
def findAllNext(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
after this Tag in the document."""
return self._findAll(name, attrs, text, limit, self.nextGenerator,
**kwargs)
def findNextSibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears after this Tag in the document."""
return self._findOne(self.findNextSiblings, name, attrs, text,
**kwargs)
def findNextSiblings(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear after this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.nextSiblingGenerator, **kwargs)
fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x
def findPrevious(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears before this Tag in the document."""
return self._findOne(self.findAllPrevious, name, attrs, text, **kwargs)
def findAllPrevious(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
before this Tag in the document."""
return self._findAll(name, attrs, text, limit, self.previousGenerator,
**kwargs)
fetchPrevious = findAllPrevious # Compatibility with pre-3.x
def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears before this Tag in the document."""
return self._findOne(self.findPreviousSiblings, name, attrs, text,
**kwargs)
def findPreviousSiblings(self, name=None, attrs={}, text=None,
limit=None, **kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear before this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.previousSiblingGenerator, **kwargs)
fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x
def findParent(self, name=None, attrs={}, **kwargs):
"""Returns the closest parent of this Tag that matches the given
criteria."""
# NOTE: We can't use _findOne because findParents takes a different
# set of arguments.
r = None
l = self.findParents(name, attrs, 1)
if l:
r = l[0]
return r
def findParents(self, name=None, attrs={}, limit=None, **kwargs):
"""Returns the parents of this Tag that match the given
criteria."""
return self._findAll(name, attrs, None, limit, self.parentGenerator,
**kwargs)
fetchParents = findParents # Compatibility with pre-3.x
#These methods do the real heavy lifting.
def _findOne(self, method, name, attrs, text, **kwargs):
r = None
l = method(name, attrs, text, 1, **kwargs)
if l:
r = l[0]
return r
def _findAll(self, name, attrs, text, limit, generator, **kwargs):
"Iterates over a generator looking for things that match."
if isinstance(name, SoupStrainer):
strainer = name
# (Possibly) special case some findAll*(...) searches
elif text is None and not limit and not attrs and not kwargs:
# findAll*(True)
if name is True:
return [element for element in generator()
if isinstance(element, Tag)]
# findAll*('tag-name')
elif isinstance(name, basestring):
return [element for element in generator()
if isinstance(element, Tag) and
element.name == name]
else:
strainer = SoupStrainer(name, attrs, text, **kwargs)
# Build a SoupStrainer
else:
strainer = SoupStrainer(name, attrs, text, **kwargs)
results = ResultSet(strainer)
g = generator()
while True:
try:
i = g.next()
except StopIteration:
break
if i:
found = strainer.search(i)
if found:
results.append(found)
if limit and len(results) >= limit:
break
return results
#These Generators can be used to navigate starting from both
#NavigableStrings and Tags.
def nextGenerator(self):
i = self
while i is not None:
i = i.next
yield i
def nextSiblingGenerator(self):
i = self
while i is not None:
i = i.nextSibling
yield i
def previousGenerator(self):
i = self
while i is not None:
i = i.previous
yield i
def previousSiblingGenerator(self):
i = self
while i is not None:
i = i.previousSibling
yield i
def parentGenerator(self):
i = self
while i is not None:
i = i.parent
yield i
# Utility methods
def substituteEncoding(self, str, encoding=None):
encoding = encoding or "utf-8"
return str.replace("%SOUP-ENCODING%", encoding)
def toEncoding(self, s, encoding=None):
"""Encodes an object to a string in some encoding, or to Unicode.
."""
if isinstance(s, unicode):
if encoding:
s = s.encode(encoding)
elif isinstance(s, str):
if encoding:
s = s.encode(encoding)
else:
s = unicode(s)
else:
if encoding:
s = self.toEncoding(str(s), encoding)
else:
s = unicode(s)
return s
BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
+ "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
+ ")")
def _sub_entity(self, x):
"""Used with a regular expression to substitute the
appropriate XML entity for an XML special character."""
return "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";"
class NavigableString(unicode, PageElement):
def __new__(cls, value):
"""Create a new NavigableString.
When unpickling a NavigableString, this method is called with
the string in DEFAULT_OUTPUT_ENCODING. That encoding needs to be
passed in to the superclass's __new__ or the superclass won't know
how to handle non-ASCII characters.
"""
if isinstance(value, unicode):
return unicode.__new__(cls, value)
return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING)
def __getnewargs__(self):
return (NavigableString.__str__(self),)
def __getattr__(self, attr):
"""text.string gives you text. This is for backwards
compatibility for Navigable*String, but for CData* it lets you
get the string without the CData wrapper."""
if attr == 'string':
return self
else:
raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__.__name__, attr)
def __unicode__(self):
return str(self).decode(DEFAULT_OUTPUT_ENCODING)
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
# Substitute outgoing XML entities.
data = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, self)
if encoding:
return data.encode(encoding)
else:
return data
class CData(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
return "<![CDATA[%s]]>" % NavigableString.__str__(self, encoding)
class ProcessingInstruction(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
output = self
if "%SOUP-ENCODING%" in output:
output = self.substituteEncoding(output, encoding)
return "<?%s?>" % self.toEncoding(output, encoding)
class Comment(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
return "<!--%s-->" % NavigableString.__str__(self, encoding)
class Declaration(NavigableString):
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
return "<!%s>" % NavigableString.__str__(self, encoding)
class Tag(PageElement):
"""Represents a found HTML tag with its attributes and contents."""
def _convertEntities(self, match):
"""Used in a call to re.sub to replace HTML, XML, and numeric
entities with the appropriate Unicode characters. If HTML
entities are being converted, any unrecognized entities are
escaped."""
x = match.group(1)
if self.convertHTMLEntities and x in name2codepoint:
return unichr(name2codepoint[x])
elif x in self.XML_ENTITIES_TO_SPECIAL_CHARS:
if self.convertXMLEntities:
return self.XML_ENTITIES_TO_SPECIAL_CHARS[x]
else:
return u'&%s;' % x
elif len(x) > 0 and x[0] == '#':
# Handle numeric entities
if len(x) > 1 and x[1] == 'x':
return unichr(int(x[2:], 16))
else:
return unichr(int(x[1:]))
elif self.escapeUnrecognizedEntities:
return u'&%s;' % x
else:
return u'&%s;' % x
def __init__(self, parser, name, attrs=None, parent=None,
previous=None):
"Basic constructor."
# We don't actually store the parser object: that lets extracted
# chunks be garbage-collected
self.parserClass = parser.__class__
self.isSelfClosing = parser.isSelfClosingTag(name)
self.name = name
if attrs is None:
attrs = []
elif isinstance(attrs, dict):
attrs = attrs.items()
self.attrs = attrs
self.contents = []
self.setup(parent, previous)
self.hidden = False
self.containsSubstitutions = False
self.convertHTMLEntities = parser.convertHTMLEntities
self.convertXMLEntities = parser.convertXMLEntities
self.escapeUnrecognizedEntities = parser.escapeUnrecognizedEntities
# Convert any HTML, XML, or numeric entities in the attribute values.
convert = lambda(k, val): (k,
re.sub("&(#\d+|#x[0-9a-fA-F]+|\w+);",
self._convertEntities,
val))
self.attrs = map(convert, self.attrs)
def getString(self):
if (len(self.contents) == 1
and isinstance(self.contents[0], NavigableString)):
return self.contents[0]
def setString(self, string):
"""Replace the contents of the tag with a string"""
self.clear()
self.append(string)
string = property(getString, setString)
def getText(self, separator=u""):
if not len(self.contents):
return u""
stopNode = self._lastRecursiveChild().next
strings = []
current = self.contents[0]
while current is not stopNode:
if isinstance(current, NavigableString):
strings.append(current.strip())
current = current.next
return separator.join(strings)
text = property(getText)
def get(self, key, default=None):
"""Returns the value of the 'key' attribute for the tag, or
the value given for 'default' if it doesn't have that
attribute."""
return self._getAttrMap().get(key, default)
def clear(self):
"""Extract all children."""
for child in self.contents[:]:
child.extract()
def index(self, element):
for i, child in enumerate(self.contents):
if child is element:
return i
raise ValueError("Tag.index: element not in tag")
def has_key(self, key):
return self._getAttrMap().has_key(key)
def __getitem__(self, key):
"""tag[key] returns the value of the 'key' attribute for the tag,
and throws an exception if it's not there."""
return self._getAttrMap()[key]
def __iter__(self):
"Iterating over a tag iterates over its contents."
return iter(self.contents)
def __len__(self):
"The length of a tag is the length of its list of contents."
return len(self.contents)
def __contains__(self, x):
return x in self.contents
def __nonzero__(self):
"A tag is non-None even if it has no contents."
return True
def __setitem__(self, key, value):
"""Setting tag[key] sets the value of the 'key' attribute for the
tag."""
self._getAttrMap()
self.attrMap[key] = value
found = False
for i in range(0, len(self.attrs)):
if self.attrs[i][0] == key:
self.attrs[i] = (key, value)
found = True
if not found:
self.attrs.append((key, value))
self._getAttrMap()[key] = value
def __delitem__(self, key):
"Deleting tag[key] deletes all 'key' attributes for the tag."
for item in self.attrs:
if item[0] == key:
self.attrs.remove(item)
#We don't break because bad HTML can define the same
#attribute multiple times.
self._getAttrMap()
if self.attrMap.has_key(key):
del self.attrMap[key]
def __call__(self, *args, **kwargs):
"""Calling a tag like a function is the same as calling its
findAll() method. Eg. tag('a') returns a list of all the A tags
found within this tag."""
return self.findAll(*args, **kwargs)
def __getattr__(self, tag):
#print "Getattr %s.%s" % (self.__class__, tag)
if len(tag) > 3 and tag.rfind('Tag') == len(tag)-3:
return self.find(tag[:-3])
elif tag.find('__') != 0:
return self.find(tag)
raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__, tag)
def __eq__(self, other):
"""Returns true iff this tag has the same name, the same attributes,
and the same contents (recursively) as the given tag.
NOTE: right now this will return false if two tags have the
same attributes in a different order. Should this be fixed?"""
if other is self:
return True
if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other):
return False
for i in range(0, len(self.contents)):
if self.contents[i] != other.contents[i]:
return False
return True
def __ne__(self, other):
"""Returns true iff this tag is not identical to the other tag,
as defined in __eq__."""
return not self == other
def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING):
"""Renders this tag as a string."""
return self.__str__(encoding)
def __unicode__(self):
return self.__str__(None)
def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING,
prettyPrint=False, indentLevel=0):
"""Returns a string or Unicode representation of this tag and
its contents. To get Unicode, pass None for encoding.
NOTE: since Python's HTML parser consumes whitespace, this
method is not certain to reproduce the whitespace present in
the original string."""
encodedName = self.toEncoding(self.name, encoding)
attrs = []
if self.attrs:
for key, val in self.attrs:
fmt = '%s="%s"'
if isinstance(val, basestring):
if self.containsSubstitutions and '%SOUP-ENCODING%' in val:
val = self.substituteEncoding(val, encoding)
# The attribute value either:
#
# * Contains no embedded double quotes or single quotes.
# No problem: we enclose it in double quotes.
# * Contains embedded single quotes. No problem:
# double quotes work here too.
# * Contains embedded double quotes. No problem:
# we enclose it in single quotes.
# * Embeds both single _and_ double quotes. This
# can't happen naturally, but it can happen if
# you modify an attribute value after parsing
# the document. Now we have a bit of a
# problem. We solve it by enclosing the
# attribute in single quotes, and escaping any
# embedded single quotes to XML entities.
if '"' in val:
fmt = "%s='%s'"
if "'" in val:
# TODO: replace with apos when
# appropriate.
val = val.replace("'", "&squot;")
# Now we're okay w/r/t quotes. But the attribute
# value might also contain angle brackets, or
# ampersands that aren't part of entities. We need
# to escape those to XML entities too.
val = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, val)
attrs.append(fmt % (self.toEncoding(key, encoding),
self.toEncoding(val, encoding)))
close = ''
closeTag = ''
if self.isSelfClosing:
close = ' /'
else:
closeTag = '</%s>' % encodedName
indentTag, indentContents = 0, 0
if prettyPrint:
indentTag = indentLevel
space = (' ' * (indentTag-1))
indentContents = indentTag + 1
contents = self.renderContents(encoding, prettyPrint, indentContents)
if self.hidden:
s = contents
else:
s = []
attributeString = ''
if attrs:
attributeString = ' ' + ' '.join(attrs)
if prettyPrint:
s.append(space)
s.append('<%s%s%s>' % (encodedName, attributeString, close))
if prettyPrint:
s.append("\n")
s.append(contents)
if prettyPrint and contents and contents[-1] != "\n":
s.append("\n")
if prettyPrint and closeTag:
s.append(space)
s.append(closeTag)
if prettyPrint and closeTag and self.nextSibling:
s.append("\n")
s = ''.join(s)
return s
def decompose(self):
"""Recursively destroys the contents of this tree."""
self.extract()
if len(self.contents) == 0:
return
current = self.contents[0]
while current is not None:
next = current.next
if isinstance(current, Tag):
del current.contents[:]
current.parent = None
current.previous = None
current.previousSibling = None
current.next = None
current.nextSibling = None
current = next
def prettify(self, encoding=DEFAULT_OUTPUT_ENCODING):
return self.__str__(encoding, True)
def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
prettyPrint=False, indentLevel=0):
"""Renders the contents of this tag as a string in the given
encoding. If encoding is None, returns a Unicode string."""
s=[]
for c in self:
text = None
if isinstance(c, NavigableString):
text = c.__str__(encoding)
elif isinstance(c, Tag):
s.append(c.__str__(encoding, prettyPrint, indentLevel))
if text and prettyPrint:
text = text.strip()
if text:
if prettyPrint:
s.append(" " * (indentLevel-1))
s.append(text)
if prettyPrint:
s.append("\n")
return ''.join(s)
#Soup methods
def find(self, name=None, attrs={}, recursive=True, text=None,
**kwargs):
"""Return only the first child of this Tag matching the given
criteria."""
r = None
l = self.findAll(name, attrs, recursive, text, 1, **kwargs)
if l:
r = l[0]
return r
findChild = find
def findAll(self, name=None, attrs={}, recursive=True, text=None,
limit=None, **kwargs):
"""Extracts a list of Tag objects that match the given
criteria. You can specify the name of the Tag and any
attributes you want the Tag to have.
The value of a key-value pair in the 'attrs' map can be a
string, a list of strings, a regular expression object, or a
callable that takes a string and returns whether or not the
string matches for some custom definition of 'matches'. The
same is true of the tag name."""
generator = self.recursiveChildGenerator
if not recursive:
generator = self.childGenerator
return self._findAll(name, attrs, text, limit, generator, **kwargs)
findChildren = findAll
# Pre-3.x compatibility methods
first = find
fetch = findAll
def fetchText(self, text=None, recursive=True, limit=None):
return self.findAll(text=text, recursive=recursive, limit=limit)
def firstText(self, text=None, recursive=True):
return self.find(text=text, recursive=recursive)
#Private methods
def _getAttrMap(self):
"""Initializes a map representation of this tag's attributes,
if not already initialized."""
if not getattr(self, 'attrMap', None):
self.attrMap = {}
for (key, value) in self.attrs:
self.attrMap[key] = value
return self.attrMap
#Generator methods
def childGenerator(self):
# Just use the iterator from the contents
return iter(self.contents)
def recursiveChildGenerator(self):
if not len(self.contents):
raise StopIteration
stopNode = self._lastRecursiveChild().next
current = self.contents[0]
while current is not stopNode:
yield current
current = current.next
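# Illustrative sketch (not part of the original Beautiful Soup source):
# typical Tag.findAll queries. As the docstring above notes, attribute
# values may be strings, lists, regular expressions, or callables.
def _example_findall():
    soup = BeautifulSoup('<div class="x"><a href="/1">1</a>'
                         '<a href="/2" class="x">2</a></div>')
    all_links = soup.findAll('a')                  # by tag name
    by_class = soup.findAll('a', {'class': 'x'})   # by attribute map
    by_regex = soup.findAll('a', href=re.compile('^/2'))
    return all_links, by_class, by_regex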
# Next, a couple classes to represent queries and their results.
class SoupStrainer:
"""Encapsulates a number of ways of matching a markup element (tag or
text)."""
def __init__(self, name=None, attrs={}, text=None, **kwargs):
self.name = name
if isinstance(attrs, basestring):
kwargs['class'] = _match_css_class(attrs)
attrs = None
if kwargs:
if attrs:
attrs = attrs.copy()
attrs.update(kwargs)
else:
attrs = kwargs
self.attrs = attrs
self.text = text
def __str__(self):
if self.text:
return self.text
else:
return "%s|%s" % (self.name, self.attrs)
def searchTag(self, markupName=None, markupAttrs={}):
found = None
markup = None
if isinstance(markupName, Tag):
markup = markupName
markupAttrs = markup
callFunctionWithTagData = callable(self.name) \
and not isinstance(markupName, Tag)
if (not self.name) \
or callFunctionWithTagData \
or (markup and self._matches(markup, self.name)) \
or (not markup and self._matches(markupName, self.name)):
if callFunctionWithTagData:
match = self.name(markupName, markupAttrs)
else:
match = True
markupAttrMap = None
for attr, matchAgainst in self.attrs.items():
if not markupAttrMap:
if hasattr(markupAttrs, 'get'):
markupAttrMap = markupAttrs
else:
markupAttrMap = {}
for k,v in markupAttrs:
markupAttrMap[k] = v
attrValue = markupAttrMap.get(attr)
if not self._matches(attrValue, matchAgainst):
match = False
break
if match:
if markup:
found = markup
else:
found = markupName
return found
def search(self, markup):
#print 'looking for %s in %s' % (self, markup)
found = None
# If given a list of items, scan it for a text element that
# matches.
if hasattr(markup, "__iter__") \
and not isinstance(markup, Tag):
for element in markup:
if isinstance(element, NavigableString) \
and self.search(element):
found = element
break
# If it's a Tag, make sure its name or attributes match.
# Don't bother with Tags if we're searching for text.
elif isinstance(markup, Tag):
if not self.text:
found = self.searchTag(markup)
# If it's text, make sure the text matches.
elif isinstance(markup, NavigableString) or \
isinstance(markup, basestring):
if self._matches(markup, self.text):
found = markup
else:
raise Exception, "I don't know how to match against a %s" \
% markup.__class__
return found
def _matches(self, markup, matchAgainst):
#print "Matching %s against %s" % (markup, matchAgainst)
result = False
if matchAgainst is True:
result = markup is not None
elif callable(matchAgainst):
result = matchAgainst(markup)
else:
#Custom match methods take the tag as an argument, but all
#other ways of matching match the tag name as a string.
if isinstance(markup, Tag):
markup = markup.name
if markup and not isinstance(markup, basestring):
markup = unicode(markup)
#Now we know that chunk is either a string, or None.
if hasattr(matchAgainst, 'match'):
# It's a regexp object.
result = markup and matchAgainst.search(markup)
elif hasattr(matchAgainst, '__iter__'): # list-like
result = markup in matchAgainst
elif hasattr(matchAgainst, 'items'):
result = matchAgainst.has_key(markup)
elif matchAgainst and isinstance(markup, basestring):
if isinstance(markup, unicode):
matchAgainst = unicode(matchAgainst)
else:
matchAgainst = str(matchAgainst)
if not result:
result = matchAgainst == markup
return result
class ResultSet(list):
"""A ResultSet is just a list that keeps track of the SoupStrainer
that created it."""
def __init__(self, source):
list.__init__(self)
self.source = source
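# Illustrative sketch (not part of the original Beautiful Soup source):
# a SoupStrainer passed as parseOnlyThese makes the parser keep only
# matching elements; everything else is discarded while parsing.
def _example_strainer():
    only_links = SoupStrainer('a')
    soup = BeautifulSoup('<p><a href="/x">x</a><b>skip</b></p>',
                         parseOnlyThese=only_links)
    return soup.findAll('a')  # only the <a> element was retained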
# Now, some helper functions.
def buildTagMap(default, *args):
"""Turns a list of maps, lists, or scalars into a single map.
Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and
NESTING_RESET_TAGS maps out of lists and partial maps."""
built = {}
for portion in args:
if hasattr(portion, 'items'):
#It's a map. Merge it.
for k,v in portion.items():
built[k] = v
elif hasattr(portion, '__iter__'): # is a list
#It's a list. Map each item to the default.
for k in portion:
built[k] = default
else:
#It's a scalar. Map it to the default.
built[portion] = default
return built
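# Illustrative sketch (not part of the original Beautiful Soup source):
# buildTagMap flattens a mix of maps, lists, and scalars into one dict,
# mapping bare keys to the supplied default.
def _example_buildtagmap():
    # -> {'br': None, 'hr': None, 'td': ['tr']}
    return buildTagMap(None, ('br', 'hr'), {'td': ['tr']})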
# Now, the parser classes.
class BeautifulStoneSoup(Tag, SGMLParser):
"""This class contains the basic parser and search code. It defines
a parser that knows nothing about tag behavior except for the
following:
You can't close a tag without closing all the tags it encloses.
That is, "<foo><bar></foo>" actually means
"<foo><bar></bar></foo>".
[Another possible explanation is "<foo><bar /></foo>", but since
this class defines no SELF_CLOSING_TAGS, it will never use that
explanation.]
This class is useful for parsing XML or made-up markup languages,
or when BeautifulSoup makes an assumption counter to what you were
expecting."""
SELF_CLOSING_TAGS = {}
NESTABLE_TAGS = {}
RESET_NESTING_TAGS = {}
QUOTE_TAGS = {}
PRESERVE_WHITESPACE_TAGS = []
MARKUP_MASSAGE = [(re.compile('(<[^<>]*)/>'),
lambda x: x.group(1) + ' />'),
(re.compile('<!\s+([^<>]*)>'),
lambda x: '<!' + x.group(1) + '>')
]
ROOT_TAG_NAME = u'[document]'
HTML_ENTITIES = "html"
XML_ENTITIES = "xml"
XHTML_ENTITIES = "xhtml"
# TODO: This only exists for backwards-compatibility
ALL_ENTITIES = XHTML_ENTITIES
# Used when determining whether a text node is all whitespace and
# can be replaced with a single space. A text node that contains
# fancy Unicode spaces (usually non-breaking) should be left
# alone.
STRIP_ASCII_SPACES = { 9: None, 10: None, 12: None, 13: None, 32: None, }
def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None,
markupMassage=True, smartQuotesTo=XML_ENTITIES,
convertEntities=None, selfClosingTags=None, isHTML=False):
"""The Soup object is initialized as the 'root tag', and the
provided markup (which can be a string or a file-like object)
is fed into the underlying parser.
sgmllib will process most bad HTML, and the BeautifulSoup
class has some tricks for dealing with some HTML that kills
sgmllib, but Beautiful Soup can nonetheless choke or lose data
if your data uses self-closing tags or declarations
incorrectly.
By default, Beautiful Soup uses regexes to sanitize input,
avoiding the vast majority of these problems. If the problems
don't apply to you, pass in False for markupMassage, and
you'll get better performance.
The default parser massage techniques fix the two most common
instances of invalid HTML that choke sgmllib:
<br/> (No space between name of closing tag and tag close)
<! --Comment--> (Extraneous whitespace in declaration)
You can pass in a custom list of (RE object, replace method)
tuples to get Beautiful Soup to scrub your input the way you
want."""
self.parseOnlyThese = parseOnlyThese
self.fromEncoding = fromEncoding
self.smartQuotesTo = smartQuotesTo
self.convertEntities = convertEntities
# Set the rules for how we'll deal with the entities we
# encounter
if self.convertEntities:
# It doesn't make sense to convert encoded characters to
# entities even while you're converting entities to Unicode.
# Just convert it all to Unicode.
self.smartQuotesTo = None
if convertEntities == self.HTML_ENTITIES:
self.convertXMLEntities = False
self.convertHTMLEntities = True
self.escapeUnrecognizedEntities = True
elif convertEntities == self.XHTML_ENTITIES:
self.convertXMLEntities = True
self.convertHTMLEntities = True
self.escapeUnrecognizedEntities = False
elif convertEntities == self.XML_ENTITIES:
self.convertXMLEntities = True
self.convertHTMLEntities = False
self.escapeUnrecognizedEntities = False
else:
self.convertXMLEntities = False
self.convertHTMLEntities = False
self.escapeUnrecognizedEntities = False
self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags)
SGMLParser.__init__(self)
if hasattr(markup, 'read'): # It's a file-type object.
markup = markup.read()
self.markup = markup
self.markupMassage = markupMassage
try:
self._feed(isHTML=isHTML)
except StopParsing:
pass
self.markup = None # The markup can now be GCed
def convert_charref(self, name):
"""This method fixes a bug in Python's SGMLParser."""
try:
n = int(name)
except ValueError:
return
if not 0 <= n <= 127 : # ASCII ends at 127, not 255
return
return self.convert_codepoint(n)
def _feed(self, inDocumentEncoding=None, isHTML=False):
# Convert the document to Unicode.
markup = self.markup
if isinstance(markup, unicode):
if not hasattr(self, 'originalEncoding'):
self.originalEncoding = None
else:
dammit = UnicodeDammit\
(markup, [self.fromEncoding, inDocumentEncoding],
smartQuotesTo=self.smartQuotesTo, isHTML=isHTML)
markup = dammit.unicode
self.originalEncoding = dammit.originalEncoding
self.declaredHTMLEncoding = dammit.declaredHTMLEncoding
if markup:
if self.markupMassage:
if not hasattr(self.markupMassage, "__iter__"):
self.markupMassage = self.MARKUP_MASSAGE
for fix, m in self.markupMassage:
markup = fix.sub(m, markup)
# TODO: We get rid of markupMassage so that the
# soup object can be deepcopied later on. Some
# Python installations can't copy regexes. If anyone
# was relying on the existence of markupMassage, this
# might cause problems.
del(self.markupMassage)
self.reset()
SGMLParser.feed(self, markup)
# Close out any unfinished strings and close all the open tags.
self.endData()
while self.currentTag.name != self.ROOT_TAG_NAME:
self.popTag()
def __getattr__(self, methodName):
"""This method routes method call requests to either the SGMLParser
superclass or the Tag superclass, depending on the method name."""
#print "__getattr__ called on %s.%s" % (self.__class__, methodName)
if methodName.startswith('start_') or methodName.startswith('end_') \
or methodName.startswith('do_'):
return SGMLParser.__getattr__(self, methodName)
elif not methodName.startswith('__'):
return Tag.__getattr__(self, methodName)
else:
raise AttributeError
def isSelfClosingTag(self, name):
"""Returns true iff the given string is the name of a
self-closing tag according to this parser."""
return self.SELF_CLOSING_TAGS.has_key(name) \
or self.instanceSelfClosingTags.has_key(name)
def reset(self):
Tag.__init__(self, self, self.ROOT_TAG_NAME)
self.hidden = 1
SGMLParser.reset(self)
self.currentData = []
self.currentTag = None
self.tagStack = []
self.quoteStack = []
self.pushTag(self)
def popTag(self):
tag = self.tagStack.pop()
#print "Pop", tag.name
if self.tagStack:
self.currentTag = self.tagStack[-1]
return self.currentTag
def pushTag(self, tag):
#print "Push", tag.name
if self.currentTag:
self.currentTag.contents.append(tag)
self.tagStack.append(tag)
self.currentTag = self.tagStack[-1]
def endData(self, containerClass=NavigableString):
if self.currentData:
currentData = u''.join(self.currentData)
if (currentData.translate(self.STRIP_ASCII_SPACES) == '' and
not set([tag.name for tag in self.tagStack]).intersection(
self.PRESERVE_WHITESPACE_TAGS)):
if '\n' in currentData:
currentData = '\n'
else:
currentData = ' '
self.currentData = []
if self.parseOnlyThese and len(self.tagStack) <= 1 and \
(not self.parseOnlyThese.text or \
not self.parseOnlyThese.search(currentData)):
return
o = containerClass(currentData)
o.setup(self.currentTag, self.previous)
if self.previous:
self.previous.next = o
self.previous = o
self.currentTag.contents.append(o)
def _popToTag(self, name, inclusivePop=True):
"""Pops the tag stack up to and including the most recent
instance of the given tag. If inclusivePop is false, pops the tag
stack up to but *not* including the most recent instance of
the given tag."""
#print "Popping to %s" % name
if name == self.ROOT_TAG_NAME:
return
numPops = 0
mostRecentTag = None
for i in range(len(self.tagStack)-1, 0, -1):
if name == self.tagStack[i].name:
numPops = len(self.tagStack)-i
break
if not inclusivePop:
numPops = numPops - 1
for i in range(0, numPops):
mostRecentTag = self.popTag()
return mostRecentTag
def _smartPop(self, name):
"""We need to pop up to the previous tag of this type, unless
one of this tag's nesting reset triggers comes between this
tag and the previous tag of this type, OR unless this tag is a
generic nesting trigger and another generic nesting trigger
comes between this tag and the previous tag of this type.
Examples:
<p>Foo<b>Bar *<p>* should pop to 'p', not 'b'.
<p>Foo<table>Bar *<p>* should pop to 'table', not 'p'.
<p>Foo<table><tr>Bar *<p>* should pop to 'tr', not 'p'.
<li><ul><li> *<li>* should pop to 'ul', not the first 'li'.
<tr><table><tr> *<tr>* should pop to 'table', not the first 'tr'
<td><tr><td> *<td>* should pop to 'tr', not the first 'td'
"""
nestingResetTriggers = self.NESTABLE_TAGS.get(name)
isNestable = nestingResetTriggers != None
isResetNesting = self.RESET_NESTING_TAGS.has_key(name)
popTo = None
inclusive = True
for i in range(len(self.tagStack)-1, 0, -1):
p = self.tagStack[i]
if (not p or p.name == name) and not isNestable:
#Non-nestable tags get popped to the top or to their
#last occurrence.
popTo = name
break
if (nestingResetTriggers is not None
and p.name in nestingResetTriggers) \
or (nestingResetTriggers is None and isResetNesting
and self.RESET_NESTING_TAGS.has_key(p.name)):
#If we encounter one of the nesting reset triggers
#peculiar to this tag, or we encounter another tag
#that causes nesting to reset, pop up to but not
#including that tag.
popTo = p.name
inclusive = False
break
p = p.parent
if popTo:
self._popToTag(popTo, inclusive)
def unknown_starttag(self, name, attrs, selfClosing=0):
#print "Start tag %s: %s" % (name, attrs)
if self.quoteStack:
#This is not a real tag.
#print "<%s> is not real!" % name
attrs = ''.join([' %s="%s"' % (x, y) for x, y in attrs])
self.handle_data('<%s%s>' % (name, attrs))
return
self.endData()
if not self.isSelfClosingTag(name) and not selfClosing:
self._smartPop(name)
if self.parseOnlyThese and len(self.tagStack) <= 1 \
and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)):
return
tag = Tag(self, name, attrs, self.currentTag, self.previous)
if self.previous:
self.previous.next = tag
self.previous = tag
self.pushTag(tag)
if selfClosing or self.isSelfClosingTag(name):
self.popTag()
if name in self.QUOTE_TAGS:
#print "Beginning quote (%s)" % name
self.quoteStack.append(name)
self.literal = 1
return tag
def unknown_endtag(self, name):
#print "End tag %s" % name
if self.quoteStack and self.quoteStack[-1] != name:
#This is not a real end tag.
#print "</%s> is not real!" % name
self.handle_data('</%s>' % name)
return
self.endData()
self._popToTag(name)
if self.quoteStack and self.quoteStack[-1] == name:
self.quoteStack.pop()
self.literal = (len(self.quoteStack) > 0)
def handle_data(self, data):
self.currentData.append(data)
def _toStringSubclass(self, text, subclass):
"""Adds a certain piece of text to the tree as a NavigableString
subclass."""
self.endData()
self.handle_data(text)
self.endData(subclass)
def handle_pi(self, text):
"""Handle a processing instruction as a ProcessingInstruction
object, possibly one with a %SOUP-ENCODING% slot into which an
encoding will be plugged later."""
if text[:3] == "xml":
text = u"xml version='1.0' encoding='%SOUP-ENCODING%'"
self._toStringSubclass(text, ProcessingInstruction)
def handle_comment(self, text):
"Handle comments as Comment objects."
self._toStringSubclass(text, Comment)
def handle_charref(self, ref):
"Handle character references as data."
if self.convertEntities:
data = unichr(int(ref))
else:
data = '&#%s;' % ref
self.handle_data(data)
def handle_entityref(self, ref):
"""Handle entity references as data, possibly converting known
HTML and/or XML entity references to the corresponding Unicode
characters."""
data = None
if self.convertHTMLEntities:
try:
data = unichr(name2codepoint[ref])
except KeyError:
pass
if not data and self.convertXMLEntities:
data = self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref)
if not data and self.convertHTMLEntities and \
not self.XML_ENTITIES_TO_SPECIAL_CHARS.get(ref):
# TODO: We've got a problem here. We're told this is
# an entity reference, but it's not an XML entity
# reference or an HTML entity reference. Nonetheless,
# the logical thing to do is to pass it through as an
# unrecognized entity reference.
#
# Except: when the input is "&carol;" this function
# will be called with input "carol". When the input is
# "AT&T", this function will be called with input
# "T". We have no way of knowing whether a semicolon
# was present originally, so we don't know whether
# this is an unknown entity or just a misplaced
# ampersand.
#
# The more common case is a misplaced ampersand, so I
# escape the ampersand and omit the trailing semicolon.
data = "&%s" % ref
if not data:
# This case is different from the one above, because we
# haven't already gone through a supposedly comprehensive
# mapping of entities to Unicode characters. We might not
# have gone through any mapping at all. So the chances are
# very high that this is a real entity, and not a
# misplaced ampersand.
data = "&%s;" % ref
self.handle_data(data)
def handle_decl(self, data):
"Handle DOCTYPEs and the like as Declaration objects."
self._toStringSubclass(data, Declaration)
def parse_declaration(self, i):
"""Treat a bogus SGML declaration as raw data. Treat a CDATA
declaration as a CData object."""
j = None
if self.rawdata[i:i+9] == '<![CDATA[':
k = self.rawdata.find(']]>', i)
if k == -1:
k = len(self.rawdata)
data = self.rawdata[i+9:k]
j = k+3
self._toStringSubclass(data, CData)
else:
try:
j = SGMLParser.parse_declaration(self, i)
except SGMLParseError:
toHandle = self.rawdata[i:]
self.handle_data(toHandle)
j = i + len(toHandle)
return j
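# Illustrative sketch (not part of the original Beautiful Soup source):
# BeautifulStoneSoup applies none of the HTML nesting heuristics, which
# makes it the right parser for XML; the document here is a toy example.
def _example_stone_soup():
    xml = '<doc><item id="1">alpha</item><item id="2">beta</item></doc>'
    soup = BeautifulStoneSoup(xml)
    return [item['id'] for item in soup.findAll('item')]  # [u'1', u'2']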
class BeautifulSoup(BeautifulStoneSoup):
"""This parser knows the following facts about HTML:
* Some tags have no closing tag and should be interpreted as being
closed as soon as they are encountered.
* The text inside some tags (ie. 'script') may contain tags which
are not really part of the document and which should be parsed
as text, not tags. If you want to parse the text as tags, you can
always fetch it and parse it explicitly.
* Tag nesting rules:
Most tags can't be nested at all. For instance, the occurrence of
a <p> tag should implicitly close the previous <p> tag.
<p>Para1<p>Para2
should be transformed into:
<p>Para1</p><p>Para2
Some tags can be nested arbitrarily. For instance, the occurrence
of a <blockquote> tag should _not_ implicitly close the previous
<blockquote> tag.
Alice said: <blockquote>Bob said: <blockquote>Blah
should NOT be transformed into:
Alice said: <blockquote>Bob said: </blockquote><blockquote>Blah
Some tags can be nested, but the nesting is reset by the
interposition of other tags. For instance, a <tr> tag should
implicitly close the previous <tr> tag within the same <table>,
but not close a <tr> tag in another table.
<table><tr>Blah<tr>Blah
should be transformed into:
<table><tr>Blah</tr><tr>Blah
but,
<tr>Blah<table><tr>Blah
should NOT be transformed into
<tr>Blah<table></tr><tr>Blah
Differing assumptions about tag nesting rules are a major source
of problems with the BeautifulSoup class. If BeautifulSoup is not
treating as nestable a tag your page author treats as nestable,
try ICantBelieveItsBeautifulSoup, MinimalSoup, or
BeautifulStoneSoup before writing your own subclass."""
def __init__(self, *args, **kwargs):
if not kwargs.has_key('smartQuotesTo'):
kwargs['smartQuotesTo'] = self.HTML_ENTITIES
kwargs['isHTML'] = True
BeautifulStoneSoup.__init__(self, *args, **kwargs)
SELF_CLOSING_TAGS = buildTagMap(None,
('br' , 'hr', 'input', 'img', 'meta',
'spacer', 'link', 'frame', 'base', 'col'))
PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea'])
QUOTE_TAGS = {'script' : None, 'textarea' : None}
#According to the HTML standard, each of these inline tags can
#contain another tag of the same type. Furthermore, it's common
#to actually use these tags this way.
NESTABLE_INLINE_TAGS = ('span', 'font', 'q', 'object', 'bdo', 'sub', 'sup',
'center')
#According to the HTML standard, these block tags can contain
#another tag of the same type. Furthermore, it's common
#to actually use these tags this way.
NESTABLE_BLOCK_TAGS = ('blockquote', 'div', 'fieldset', 'ins', 'del')
#Lists can contain other lists, but there are restrictions.
NESTABLE_LIST_TAGS = { 'ol' : [],
'ul' : [],
'li' : ['ul', 'ol'],
'dl' : [],
'dd' : ['dl'],
'dt' : ['dl'] }
#Tables can contain other tables, but there are restrictions.
NESTABLE_TABLE_TAGS = {'table' : [],
'tr' : ['table', 'tbody', 'tfoot', 'thead'],
'td' : ['tr'],
'th' : ['tr'],
'thead' : ['table'],
'tbody' : ['table'],
'tfoot' : ['table'],
}
NON_NESTABLE_BLOCK_TAGS = ('address', 'form', 'p', 'pre')
#If one of these tags is encountered, all tags up to the next tag of
#this type are popped.
RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript',
NON_NESTABLE_BLOCK_TAGS,
NESTABLE_LIST_TAGS,
NESTABLE_TABLE_TAGS)
NESTABLE_TAGS = buildTagMap([], NESTABLE_INLINE_TAGS, NESTABLE_BLOCK_TAGS,
NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS)
# Used to detect the charset in a META tag; see start_meta
CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M)
def start_meta(self, attrs):
"""Beautiful Soup can detect a charset included in a META tag,
try to convert the document to that charset, and re-parse the
document from the beginning."""
httpEquiv = None
contentType = None
contentTypeIndex = None
tagNeedsEncodingSubstitution = False
for i in range(0, len(attrs)):
key, value = attrs[i]
key = key.lower()
if key == 'http-equiv':
httpEquiv = value
elif key == 'content':
contentType = value
contentTypeIndex = i
if httpEquiv and contentType: # It's an interesting meta tag.
match = self.CHARSET_RE.search(contentType)
if match:
if (self.declaredHTMLEncoding is not None or
self.originalEncoding == self.fromEncoding):
# An HTML encoding was sniffed while converting
# the document to Unicode, or an HTML encoding was
# sniffed during a previous pass through the
# document, or an encoding was specified
# explicitly and it worked. Rewrite the meta tag.
def rewrite(match):
return match.group(1) + "%SOUP-ENCODING%"
newAttr = self.CHARSET_RE.sub(rewrite, contentType)
attrs[contentTypeIndex] = (attrs[contentTypeIndex][0],
newAttr)
tagNeedsEncodingSubstitution = True
else:
# This is our first pass through the document.
# Go through it again with the encoding information.
newCharset = match.group(3)
if newCharset and newCharset != self.originalEncoding:
self.declaredHTMLEncoding = newCharset
self._feed(self.declaredHTMLEncoding)
raise StopParsing
pass
tag = self.unknown_starttag("meta", attrs)
if tag and tagNeedsEncodingSubstitution:
tag.containsSubstitutions = True
class StopParsing(Exception):
pass
class ICantBelieveItsBeautifulSoup(BeautifulSoup):
"""The BeautifulSoup class is oriented towards skipping over
common HTML errors like unclosed tags. However, sometimes it makes
errors of its own. For instance, consider this fragment:
<b>Foo<b>Bar</b></b>
This is perfectly valid (if bizarre) HTML. However, the
BeautifulSoup class will implicitly close the first b tag when it
encounters the second 'b'. It will think the author wrote
"<b>Foo<b>Bar", and didn't close the first 'b' tag, because
there's no real-world reason to bold something that's already
bold. When it encounters '</b></b>' it will close two more 'b'
tags, for a grand total of three tags closed instead of two. This
can throw off the rest of your document structure. The same is
true of a number of other tags, listed below.
It's much more common for someone to forget to close a 'b' tag
than to actually use nested 'b' tags, and the BeautifulSoup class
handles the common case. This class handles the not-so-common
case: where you can't believe someone wrote what they did, but
it's valid HTML and BeautifulSoup screwed up by assuming it
wouldn't be."""
I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = \
('em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong',
'cite', 'code', 'dfn', 'kbd', 'samp', 'strong', 'var', 'b',
'big')
I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ('noscript',)
NESTABLE_TAGS = buildTagMap([], BeautifulSoup.NESTABLE_TAGS,
I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS,
I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS)
class MinimalSoup(BeautifulSoup):
"""The MinimalSoup class is for parsing HTML that contains
pathologically bad markup. It makes no assumptions about tag
nesting, but it does know which tags are self-closing, that
<script> tags contain Javascript and should not be parsed, that
META tags may contain encoding information, and so on.
This also makes it better for subclassing than BeautifulStoneSoup
or BeautifulSoup."""
RESET_NESTING_TAGS = buildTagMap('noscript')
NESTABLE_TAGS = {}
class BeautifulSOAP(BeautifulStoneSoup):
"""This class will push a tag with only a single string child into
the tag's parent as an attribute. The attribute's name is the tag
name, and the value is the string child. An example should give
the flavor of the change:
<foo><bar>baz</bar></foo>
=>
<foo bar="baz"><bar>baz</bar></foo>
You can then access fooTag['bar'] instead of fooTag.barTag.string.
This is, of course, useful for scraping structures that tend to
use subelements instead of attributes, such as SOAP messages. Note
that it modifies its input, so don't print the modified version
out.
I'm not sure how many people really want to use this class; let me
know if you do. Mainly I like the name."""
def popTag(self):
if len(self.tagStack) > 1:
tag = self.tagStack[-1]
parent = self.tagStack[-2]
parent._getAttrMap()
if (isinstance(tag, Tag) and len(tag.contents) == 1 and
isinstance(tag.contents[0], NavigableString) and
not parent.attrMap.has_key(tag.name)):
parent[tag.name] = tag.contents[0]
BeautifulStoneSoup.popTag(self)
#Enterprise class names! It has come to our attention that some people
#think the names of the Beautiful Soup parser classes are too silly
#and "unprofessional" for use in enterprise screen-scraping. We feel
#your pain! For such-minded folk, the Beautiful Soup Consortium And
#All-Night Kosher Bakery recommends renaming this file to
#"RobustParser.py" (or, in cases of extreme enterprisiness,
#"RobustParserBeanInterface.class") and using the following
#enterprise-friendly class aliases:
class RobustXMLParser(BeautifulStoneSoup):
pass
class RobustHTMLParser(BeautifulSoup):
pass
class RobustWackAssHTMLParser(ICantBelieveItsBeautifulSoup):
pass
class RobustInsanelyWackAssHTMLParser(MinimalSoup):
pass
class SimplifyingSOAPParser(BeautifulSOAP):
pass
######################################################
#
# Bonus library: Unicode, Dammit
#
# This class forces XML data into a standard format (usually to UTF-8
# or Unicode). It is heavily based on code from Mark Pilgrim's
# Universal Feed Parser. It does not rewrite the XML or HTML to
# reflect a new encoding: that happens in BeautifulStoneSoup.handle_pi
# (XML) and BeautifulSoup.start_meta (HTML).
# Autodetects character encodings.
# Download from http://chardet.feedparser.org/
try:
import chardet
# import chardet.constants
# chardet.constants._debug = 1
except ImportError:
chardet = None
# cjkcodecs and iconv_codec make Python know about more character encodings.
# Both are available from http://cjkpython.i18n.org/
# They're built in if you use Python 2.4.
try:
import cjkcodecs.aliases
except ImportError:
pass
try:
import iconv_codec
except ImportError:
pass
class UnicodeDammit:
"""A class for detecting the encoding of a *ML document and
converting it to a Unicode string. If the source encoding is
windows-1252, can replace MS smart quotes with their HTML or XML
equivalents."""
# This dictionary maps commonly seen values for "charset" in HTML
# meta tags to the corresponding Python codec names. It only covers
# values that aren't in Python's aliases and can't be determined
# by the heuristics in find_codec.
CHARSET_ALIASES = { "macintosh" : "mac-roman",
"x-sjis" : "shift-jis" }
def __init__(self, markup, overrideEncodings=[],
smartQuotesTo='xml', isHTML=False):
self.declaredHTMLEncoding = None
self.markup, documentEncoding, sniffedEncoding = \
self._detectEncoding(markup, isHTML)
self.smartQuotesTo = smartQuotesTo
self.triedEncodings = []
if markup == '' or isinstance(markup, unicode):
self.originalEncoding = None
self.unicode = unicode(markup)
return
u = None
for proposedEncoding in overrideEncodings:
u = self._convertFrom(proposedEncoding)
if u: break
if not u:
for proposedEncoding in (documentEncoding, sniffedEncoding):
u = self._convertFrom(proposedEncoding)
if u: break
# If no luck and we have auto-detection library, try that:
if not u and chardet and not isinstance(self.markup, unicode):
u = self._convertFrom(chardet.detect(self.markup)['encoding'])
# As a last resort, try utf-8 and windows-1252:
if not u:
for proposed_encoding in ("utf-8", "windows-1252"):
u = self._convertFrom(proposed_encoding)
if u: break
self.unicode = u
if not u: self.originalEncoding = None
def _subMSChar(self, orig):
"""Changes a MS smart quote character to an XML or HTML
entity."""
sub = self.MS_CHARS.get(orig)
if isinstance(sub, tuple):
if self.smartQuotesTo == 'xml':
sub = '&#x%s;' % sub[1]
else:
sub = '&%s;' % sub[0]
return sub
def _convertFrom(self, proposed):
proposed = self.find_codec(proposed)
if not proposed or proposed in self.triedEncodings:
return None
self.triedEncodings.append(proposed)
markup = self.markup
# Convert smart quotes to HTML if coming from an encoding
# that might have them.
if self.smartQuotesTo and proposed.lower() in("windows-1252",
"iso-8859-1",
"iso-8859-2"):
markup = re.compile("([\x80-\x9f])").sub \
(lambda(x): self._subMSChar(x.group(1)),
markup)
try:
# print "Trying to convert document to %s" % proposed
u = self._toUnicode(markup, proposed)
self.markup = u
self.originalEncoding = proposed
except Exception, e:
# print "That didn't work!"
# print e
return None
#print "Correct encoding: %s" % proposed
return self.markup
def _toUnicode(self, data, encoding):
'''Given a string and its encoding, decodes the string into Unicode.
%encoding is a string recognized by encodings.aliases'''
# strip Byte Order Mark (if present)
if (len(data) >= 4) and (data[:2] == '\xfe\xff') \
and (data[2:4] != '\x00\x00'):
encoding = 'utf-16be'
data = data[2:]
elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \
and (data[2:4] != '\x00\x00'):
encoding = 'utf-16le'
data = data[2:]
elif data[:3] == '\xef\xbb\xbf':
encoding = 'utf-8'
data = data[3:]
elif data[:4] == '\x00\x00\xfe\xff':
encoding = 'utf-32be'
data = data[4:]
elif data[:4] == '\xff\xfe\x00\x00':
encoding = 'utf-32le'
data = data[4:]
newdata = unicode(data, encoding)
return newdata
def _detectEncoding(self, xml_data, isHTML=False):
"""Given a document, tries to detect its XML encoding."""
xml_encoding = sniffed_xml_encoding = None
try:
if xml_data[:4] == '\x4c\x6f\xa7\x94':
# EBCDIC
xml_data = self._ebcdic_to_ascii(xml_data)
elif xml_data[:4] == '\x00\x3c\x00\x3f':
# UTF-16BE
sniffed_xml_encoding = 'utf-16be'
xml_data = unicode(xml_data, 'utf-16be').encode('utf-8')
elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \
and (xml_data[2:4] != '\x00\x00'):
# UTF-16BE with BOM
sniffed_xml_encoding = 'utf-16be'
xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8')
elif xml_data[:4] == '\x3c\x00\x3f\x00':
# UTF-16LE
sniffed_xml_encoding = 'utf-16le'
xml_data = unicode(xml_data, 'utf-16le').encode('utf-8')
elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \
(xml_data[2:4] != '\x00\x00'):
# UTF-16LE with BOM
sniffed_xml_encoding = 'utf-16le'
xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8')
elif xml_data[:4] == '\x00\x00\x00\x3c':
# UTF-32BE
sniffed_xml_encoding = 'utf-32be'
xml_data = unicode(xml_data, 'utf-32be').encode('utf-8')
elif xml_data[:4] == '\x3c\x00\x00\x00':
# UTF-32LE
sniffed_xml_encoding = 'utf-32le'
xml_data = unicode(xml_data, 'utf-32le').encode('utf-8')
elif xml_data[:4] == '\x00\x00\xfe\xff':
# UTF-32BE with BOM
sniffed_xml_encoding = 'utf-32be'
xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8')
elif xml_data[:4] == '\xff\xfe\x00\x00':
# UTF-32LE with BOM
sniffed_xml_encoding = 'utf-32le'
xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8')
elif xml_data[:3] == '\xef\xbb\xbf':
# UTF-8 with BOM
sniffed_xml_encoding = 'utf-8'
xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8')
else:
sniffed_xml_encoding = 'ascii'
pass
except:
xml_encoding_match = None
xml_encoding_match = re.compile(
'^<\?.*encoding=[\'"](.*?)[\'"].*\?>').match(xml_data)
if not xml_encoding_match and isHTML:
regexp = re.compile('<\s*meta[^>]+charset=([^>]*?)[;\'">]', re.I)
xml_encoding_match = regexp.search(xml_data)
if xml_encoding_match is not None:
xml_encoding = xml_encoding_match.groups()[0].lower()
if isHTML:
self.declaredHTMLEncoding = xml_encoding
if sniffed_xml_encoding and \
(xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode',
'iso-10646-ucs-4', 'ucs-4', 'csucs4',
'utf-16', 'utf-32', 'utf_16', 'utf_32',
'utf16', 'u16')):
xml_encoding = sniffed_xml_encoding
return xml_data, xml_encoding, sniffed_xml_encoding
def find_codec(self, charset):
return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \
or (charset and self._codec(charset.replace("-", ""))) \
or (charset and self._codec(charset.replace("-", "_"))) \
or charset
def _codec(self, charset):
if not charset: return charset
codec = None
try:
codecs.lookup(charset)
codec = charset
except (LookupError, ValueError):
pass
return codec
EBCDIC_TO_ASCII_MAP = None
def _ebcdic_to_ascii(self, s):
c = self.__class__
if not c.EBCDIC_TO_ASCII_MAP:
emap = (0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15,
16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31,
128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7,
144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26,
32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33,
38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94,
45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63,
186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34,
195,97,98,99,100,101,102,103,104,105,196,197,198,199,200,
201,202,106,107,108,109,110,111,112,113,114,203,204,205,
206,207,208,209,126,115,116,117,118,119,120,121,122,210,
211,212,213,214,215,216,217,218,219,220,221,222,223,224,
225,226,227,228,229,230,231,123,65,66,67,68,69,70,71,72,
73,232,233,234,235,236,237,125,74,75,76,77,78,79,80,81,
82,238,239,240,241,242,243,92,159,83,84,85,86,87,88,89,
90,244,245,246,247,248,249,48,49,50,51,52,53,54,55,56,57,
250,251,252,253,254,255)
import string
c.EBCDIC_TO_ASCII_MAP = string.maketrans( \
''.join(map(chr, range(256))), ''.join(map(chr, emap)))
return s.translate(c.EBCDIC_TO_ASCII_MAP)
MS_CHARS = { '\x80' : ('euro', '20AC'),
'\x81' : ' ',
'\x82' : ('sbquo', '201A'),
'\x83' : ('fnof', '192'),
'\x84' : ('bdquo', '201E'),
'\x85' : ('hellip', '2026'),
'\x86' : ('dagger', '2020'),
'\x87' : ('Dagger', '2021'),
'\x88' : ('circ', '2C6'),
'\x89' : ('permil', '2030'),
'\x8A' : ('Scaron', '160'),
'\x8B' : ('lsaquo', '2039'),
'\x8C' : ('OElig', '152'),
'\x8D' : '?',
'\x8E' : ('#x17D', '17D'),
'\x8F' : '?',
'\x90' : '?',
'\x91' : ('lsquo', '2018'),
'\x92' : ('rsquo', '2019'),
'\x93' : ('ldquo', '201C'),
'\x94' : ('rdquo', '201D'),
'\x95' : ('bull', '2022'),
'\x96' : ('ndash', '2013'),
'\x97' : ('mdash', '2014'),
'\x98' : ('tilde', '2DC'),
'\x99' : ('trade', '2122'),
'\x9a' : ('scaron', '161'),
'\x9b' : ('rsaquo', '203A'),
'\x9c' : ('oelig', '153'),
'\x9d' : '?',
'\x9e' : ('#x17E', '17E'),
'\x9f' : ('Yuml', ''),}
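# --- Illustrative sketch, not part of the original library: a minimal
# demonstration of UnicodeDammit. A UTF-8 byte string carrying a BOM is
# decoded deterministically, because the BOM itself is the encoding hint
# (no chardet guessing is needed).
def _unicode_dammit_example():
    dammit = UnicodeDammit('\xef\xbb\xbfSacr\xc3\xa9 bleu!')
    assert dammit.unicode == u'Sacr\xe9 bleu!'
    assert dammit.originalEncoding == 'utf-8'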
#######################################################################
#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
import sys
soup = BeautifulSoup(sys.stdin)
print soup.prettify()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/thirdparty/BeautifulSoup.py
|
BeautifulSoup.py
|
__author__ = "Joe Gregorio ([email protected])"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = []
__version__ = "1.0.0"
__license__ = "MIT"
__history__ = """
"""
import urlparse
# Convert an IRI to a URI following the rules in RFC 3987
#
# The characters we need to encode and escape are defined in the spec:
#
# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
# / %xD0000-DFFFD / %xE1000-EFFFD
escape_range = [
(0xA0, 0xD7FF ),
(0xE000, 0xF8FF ),
(0xF900, 0xFDCF ),
(0xFDF0, 0xFFEF),
(0x10000, 0x1FFFD ),
(0x20000, 0x2FFFD ),
(0x30000, 0x3FFFD),
(0x40000, 0x4FFFD ),
(0x50000, 0x5FFFD ),
(0x60000, 0x6FFFD),
(0x70000, 0x7FFFD ),
(0x80000, 0x8FFFD ),
(0x90000, 0x9FFFD),
(0xA0000, 0xAFFFD ),
(0xB0000, 0xBFFFD ),
(0xC0000, 0xCFFFD),
(0xD0000, 0xDFFFD ),
(0xE1000, 0xEFFFD),
(0xF0000, 0xFFFFD ),
(0x100000, 0x10FFFD)
]
def encode(c):
retval = c
i = ord(c)
for low, high in escape_range:
if i < low:
break
if i >= low and i <= high:
retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')])
break
return retval
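# --- Illustrative sketch, not part of the original file: encode() leaves
# ASCII untouched and percent-encodes the UTF-8 octets of any character
# that falls inside one of the ranges listed above.
def _encode_example():
    assert encode(u'a') == u'a'
    assert encode(u'\u2604') == '%E2%98%84'  # U+2604 COMET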
def iri2uri(uri):
"""Convert an IRI to a URI. Note that IRIs must be
passed in a unicode strings. That is, do not utf-8 encode
the IRI before passing it into the function."""
if isinstance(uri, unicode):
(scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
authority = authority.encode('idna')
# For each character in 'ucschar' or 'iprivate'
# 1. encode as utf-8
# 2. then %-encode each octet of that utf-8
uri = urlparse.urlunsplit((scheme, authority, path, query, fragment))
uri = "".join([encode(c) for c in uri])
return uri
if __name__ == "__main__":
import unittest
class Test(unittest.TestCase):
def test_uris(self):
"""Test that URIs are invariant under the transformation."""
invariant = [
u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
u"http://www.ietf.org/rfc/rfc2396.txt",
u"ldap://[2001:db8::7]/c=GB?objectClass?one",
u"mailto:[email protected]",
u"news:comp.infosystems.www.servers.unix",
u"tel:+1-816-555-1212",
u"telnet://192.0.2.16:80/",
u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
for uri in invariant:
self.assertEqual(uri, iri2uri(uri))
def test_iri(self):
""" Test that the right type of escaping is done for each part of the URI."""
self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
unittest.main()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/thirdparty/httplib2/iri2uri.py
|
iri2uri.py
|
from __future__ import generators
"""
httplib2
A caching http interface that supports ETags and gzip
to conserve bandwidth.
Requires Python 2.3 or later
Changelog:
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
"""
__author__ = "Joe Gregorio ([email protected])"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Thomas Broyer ([email protected])",
"James Antill",
"Xavier Verges Farrero",
"Jonathan Feinberg",
"Blair Zajac",
"Sam Ruby",
"Louis Nyffenegger"]
__license__ = "MIT"
__version__ = "0.7.7"
import re
import sys
import email
import email.Utils
import email.Message
import email.FeedParser
import StringIO
import gzip
import zlib
import httplib
import urlparse
import urllib
import base64
import os
import copy
import calendar
import time
import random
import errno
try:
from hashlib import sha1 as _sha, md5 as _md5
except ImportError:
# prior to Python 2.5, these were separate modules
import sha
import md5
_sha = sha.new
_md5 = md5.new
import hmac
from gettext import gettext as _
import socket
try:
from httplib2 import socks
except ImportError:
try:
import socks
except ImportError:
socks = None
# Build the appropriate socket wrapper for ssl
try:
import ssl # python 2.6
ssl_SSLError = ssl.SSLError
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if disable_validation:
cert_reqs = ssl.CERT_NONE
else:
cert_reqs = ssl.CERT_REQUIRED
# We should be specifying SSL version 3 or TLS v1, but the ssl module
# doesn't expose the necessary knobs. So we need to go with the default
# of SSLv23.
return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
cert_reqs=cert_reqs, ca_certs=ca_certs)
except (AttributeError, ImportError):
ssl_SSLError = None
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if not disable_validation:
raise CertificateValidationUnsupported(
"SSL certificate validation is not supported without "
"the ssl module installed. To avoid this error, install "
"the ssl module, or explicitly disable validation.")
ssl_sock = socket.ssl(sock, key_file, cert_file)
return httplib.FakeSocket(sock, ssl_sock)
if sys.version_info >= (2,3):
from iri2uri import iri2uri
else:
def iri2uri(uri):
return uri
def has_timeout(timeout): # python 2.6
if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
return (timeout is not None)
__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent',
'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError',
'debuglevel', 'ProxiesUnavailableError']
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
RETRIES = 2
# Python 2.3 support
if sys.version_info < (2,4):
def sorted(seq):
seq.sort()
return seq
# Python 2.3 support
def HTTPResponse__getheaders(self):
"""Return list of (header, value) tuples."""
if self.msg is None:
raise httplib.ResponseNotReady()
return self.msg.items()
if not hasattr(httplib.HTTPResponse, 'getheaders'):
httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass
# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
def __init__(self, desc, response, content):
self.response = response
self.content = content
HttpLib2Error.__init__(self, desc)
class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
class RedirectLimit(HttpLib2ErrorWithResponse): pass
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class MalformedHeader(HttpLib2Error): pass
class RelativeURIError(HttpLib2Error): pass
class ServerNotFoundError(HttpLib2Error): pass
class ProxiesUnavailableError(HttpLib2Error): pass
class CertificateValidationUnsupported(HttpLib2Error): pass
class SSLHandshakeError(HttpLib2Error): pass
class NotSupportedOnThisPlatform(HttpLib2Error): pass
class CertificateHostnameMismatch(SSLHandshakeError):
def __init__(self, desc, host, cert):
HttpLib2Error.__init__(self, desc)
self.host = host
self.cert = cert
# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
# Pluggable cache storage (supports storing the cache in
# flat files by default. We need a plug-in architecture
# that can support Berkeley DB and Squid)
# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 responses by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5
# Default CA certificates file bundled with httplib2.
CA_CERTS = os.path.join(
os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
return [header for header in response.keys() if header not in hopbyhop]
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986.
(scheme, authority, path, query, fragment) = parse_uri(uri)
"""
groups = URI.match(uri).groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8])
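# --- Illustrative sketch, not part of the original library: how the
# Appendix B regex decomposes a URI into its five components.
def _parse_uri_example():
    assert parse_uri("http://example.com/a?b=1#frag") == \
        ('http', 'example.com', '/a', 'b=1', 'frag')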
def urlnorm(uri):
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
authority = authority.lower()
scheme = scheme.lower()
if not path:
path = "/"
# Could do syntax based normalization of the URI before
# computing the digest. See Section 6.2.2 of Std 66.
request_uri = query and "?".join([path, query]) or path
scheme = scheme.lower()
defrag_uri = scheme + "://" + authority + request_uri
return scheme, authority, request_uri, defrag_uri
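# --- Illustrative sketch, not part of the original library: urlnorm
# lower-cases the scheme and authority and supplies a default path.
def _urlnorm_example():
    assert urlnorm("HTTP://Example.COM") == \
        ('http', 'example.com', '/', 'http://example.com/')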
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+')
def safename(filename):
"""Return a filename suitable for the cache.
Strips dangerous and common characters to create a filename we
can use to store the cache in.
"""
try:
if re_url_scheme.match(filename):
if isinstance(filename,str):
filename = filename.decode('utf-8')
filename = filename.encode('idna')
else:
filename = filename.encode('idna')
except UnicodeError:
pass
if isinstance(filename,unicode):
filename=filename.encode('utf-8')
filemd5 = _md5(filename).hexdigest()
filename = re_url_scheme.sub("", filename)
filename = re_slash.sub(",", filename)
# limit length of filename
if len(filename)>200:
filename=filename[:200]
return ",".join((filename, filemd5))
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
# Lower-case the header names and collapse folded/linear whitespace in values.
return dict([(key.lower(), NORMALIZE_SPACE.sub(' ', value).strip()) for (key, value) in headers.iteritems()])
def _parse_cache_control(headers):
retval = {}
if headers.has_key('cache-control'):
parts = headers['cache-control'].split(',')
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
retval = dict(parts_with_args + parts_wo_args)
return retval
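# --- Illustrative sketch, not part of the original library: directives
# with values parse to key/value pairs; bare directives map to 1.
def _parse_cache_control_example():
    hdrs = {'cache-control': 'max-age=3600, no-cache'}
    assert _parse_cache_control(hdrs) == {'max-age': '3600', 'no-cache': 1}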
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on, useful for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0
# In regex below:
# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')
def _parse_www_authenticate(headers, headername='www-authenticate'):
"""Returns a dictionary of dictionaries, one dict
per auth_scheme."""
retval = {}
if headers.has_key(headername):
try:
authenticate = headers[headername].strip()
www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
while authenticate:
# Break off the scheme at the beginning of the line
if headername == 'authentication-info':
(auth_scheme, the_rest) = ('digest', authenticate)
else:
(auth_scheme, the_rest) = authenticate.split(" ", 1)
# Now loop over all the key value pairs that come after the scheme,
# being careful not to roll into the next scheme
match = www_auth.search(the_rest)
auth_params = {}
while match:
if match and len(match.groups()) == 3:
(key, value, the_rest) = match.groups()
auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
match = www_auth.search(the_rest)
retval[auth_scheme.lower()] = auth_params
authenticate = the_rest.strip()
except ValueError:
raise MalformedHeader("WWW-Authenticate")
return retval
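# --- Illustrative sketch, not part of the original library: each challenge
# scheme becomes a key mapping to a dict of its (lower-cased) auth-params.
def _parse_www_authenticate_example():
    hdrs = {'www-authenticate': 'Digest realm="test", qop="auth"'}
    assert _parse_www_authenticate(hdrs) == \
        {'digest': {'realm': 'test', 'qop': 'auth'}}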
def _entry_disposition(response_headers, request_headers):
"""Determine freshness from the Date, Expires and Cache-Control headers.
We don't handle the following:
1. Cache-Control: max-stale
2. Age: headers are not used in the calculations.
Note that this algorithm is simpler than you might think
because we are operating as a private (non-shared) cache.
This lets us ignore 's-maxage'. We can also ignore
'proxy-invalidate' since we aren't a proxy.
As a design decision we will never return a stale document
as fresh, hence the non-implementation of 'max-stale'. This
also lets us safely ignore 'must-revalidate', since we operate
as if every server had sent 'must-revalidate'.
Since we are private we get to ignore both 'public' and
'private' parameters. We also ignore 'no-transform' since
we don't do any transformations.
The 'no-store' parameter is handled at a higher level.
So the only Cache-Control parameters we look at are:
no-cache
only-if-cached
max-age
min-fresh
"""
retval = "STALE"
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
retval = "TRANSPARENT"
if 'cache-control' not in request_headers:
request_headers['cache-control'] = 'no-cache'
elif cc.has_key('no-cache'):
retval = "TRANSPARENT"
elif cc_response.has_key('no-cache'):
retval = "STALE"
elif cc.has_key('only-if-cached'):
retval = "FRESH"
elif response_headers.has_key('date'):
date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
now = time.time()
current_age = max(0, now - date)
if cc_response.has_key('max-age'):
try:
freshness_lifetime = int(cc_response['max-age'])
except ValueError:
freshness_lifetime = 0
elif response_headers.has_key('expires'):
expires = email.Utils.parsedate_tz(response_headers['expires'])
if None == expires:
freshness_lifetime = 0
else:
freshness_lifetime = max(0, calendar.timegm(expires) - date)
else:
freshness_lifetime = 0
if cc.has_key('max-age'):
try:
freshness_lifetime = int(cc['max-age'])
except ValueError:
freshness_lifetime = 0
if cc.has_key('min-fresh'):
try:
min_fresh = int(cc['min-fresh'])
except ValueError:
min_fresh = 0
current_age += min_fresh
if freshness_lifetime > current_age:
retval = "FRESH"
return retval
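# --- Illustrative sketch, not part of the original library: two worked
# cases of the freshness algorithm above. A response dated "now" with
# max-age=3600 is FRESH; the same response with no freshness info is STALE.
def _entry_disposition_example():
    now = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime())
    assert _entry_disposition(
        {'date': now, 'cache-control': 'max-age=3600'}, {}) == "FRESH"
    assert _entry_disposition({'date': now}, {}) == "STALE"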
def _decompressContent(response, new_content):
content = new_content
try:
encoding = response.get('content-encoding', None)
if encoding in ['gzip', 'deflate']:
if encoding == 'gzip':
content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
if encoding == 'deflate':
content = zlib.decompress(content)
response['content-length'] = str(len(content))
# Record the historical presence of the encoding in a way that won't interfere.
response['-content-encoding'] = response['content-encoding']
del response['content-encoding']
except IOError:
content = ""
raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
return content
def _updateCache(request_headers, response_headers, content, cache, cachekey):
if cachekey:
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if cc.has_key('no-store') or cc_response.has_key('no-store'):
cache.delete(cachekey)
else:
info = email.Message.Message()
for key, value in response_headers.iteritems():
if key not in ['status','content-encoding','transfer-encoding']:
info[key] = value
# Add annotations to the cache to indicate what headers
# are variant for this request.
vary = response_headers.get('vary', None)
if vary:
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
try:
info[key] = request_headers[header]
except KeyError:
pass
status = response_headers.status
if status == 304:
status = 200
status_header = 'status: %d\r\n' % status
header_str = info.as_string()
header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
text = "".join([status_header, header_str, content])
cache.set(cachekey, text)
def _cnonce():
dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
return dig[:16]
def _wsse_username_token(cnonce, iso_now, password):
return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
# For credentials we need two things, first
# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.)
# Then we also need a list of URIs that have already demanded authentication
# That list is tricky since sub-URIs can take the same auth, or the
# auth scheme may change as you descend the tree.
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
class Authentication(object):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
self.path = path
self.host = host
self.credentials = credentials
self.http = http
def depth(self, request_uri):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return request_uri[len(self.path):].count("/")
def inscope(self, host, request_uri):
# XXX Should we normalize the request_uri?
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return (host == self.host) and path.startswith(self.path)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header. Over-ride this in sub-classes."""
pass
def response(self, response, content):
"""Gives us a chance to update with new nonces
or such returned from the last authorized response.
Over-ride this in sub-classes if necessary.
Return True if the request is to be retried, for
example Digest may return stale=true.
"""
return False
class BasicAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
class DigestAuthentication(Authentication):
"""Only do qop='auth' and MD5, since that
is all Apache currently implements"""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['digest']
qop = self.challenge.get('qop', 'auth')
self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
if self.challenge['qop'] is None:
raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
if self.challenge['algorithm'] != 'MD5':
raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
self.challenge['nc'] = 1
def request(self, method, request_uri, headers, content, cnonce = None):
"""Modify the request headers"""
H = lambda x: _md5(x).hexdigest()
KD = lambda s, d: H("%s:%s" % (s, d))
A2 = "".join([method, ":", request_uri])
self.challenge['cnonce'] = cnonce or _cnonce()
request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'],
'%08x' % self.challenge['nc'],
self.challenge['cnonce'],
self.challenge['qop'], H(A2)
))
headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['nonce'],
request_uri,
self.challenge['algorithm'],
request_digest,
self.challenge['qop'],
self.challenge['nc'],
self.challenge['cnonce'],
)
if self.challenge.get('opaque'):
headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
self.challenge['nc'] += 1
def response(self, response, content):
if not response.has_key('authentication-info'):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
if 'true' == challenge.get('stale'):
self.challenge['nonce'] = challenge['nonce']
self.challenge['nc'] = 1
return True
else:
updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
if updated_challenge.has_key('nextnonce'):
self.challenge['nonce'] = updated_challenge['nextnonce']
self.challenge['nc'] = 1
return False
class HmacDigestAuthentication(Authentication):
"""Adapted from Robert Sayre's code and DigestAuthentication above."""
__author__ = "Thomas Broyer ([email protected])"
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['hmacdigest']
# TODO: self.challenge['domain']
self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
if self.challenge['reason'] not in ['unauthorized', 'integrity']:
self.challenge['reason'] = 'unauthorized'
self.challenge['salt'] = self.challenge.get('salt', '')
if not self.challenge.get('snonce'):
raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
if self.challenge['algorithm'] == 'HMAC-MD5':
self.hashmod = _md5
else:
self.hashmod = _sha
if self.challenge['pw-algorithm'] == 'MD5':
self.pwhashmod = _md5
else:
self.pwhashmod = _sha
self.key = "".join([self.credentials[0], ":",
self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
":", self.challenge['realm']
])
self.key = self.pwhashmod(self.key).hexdigest().lower()
def request(self, method, request_uri, headers, content):
"""Modify the request headers"""
keys = _get_end2end_headers(headers)
keylist = "".join(["%s " % k for k in keys])
headers_val = "".join([headers[k] for k in keys])
created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
cnonce = _cnonce()
request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['snonce'],
cnonce,
request_uri,
created,
request_digest,
keylist,
)
def response(self, response, content):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
if challenge.get('reason') in ['integrity', 'stale']:
return True
return False
class WsseAuthentication(Authentication):
"""This is thinly tested and should not be relied upon.
At this time there isn't any third party server to test against.
Blogger and TypePad implemented this algorithm at one point
but Blogger has since switched to Basic over HTTPS and
TypePad has implemented it wrong, by never issuing a 401
challenge but instead requiring your client to telepathically know that
their endpoint is expecting WSSE profile="UsernameToken"."""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'WSSE profile="UsernameToken"'
iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
cnonce = _cnonce()
password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
self.credentials[0],
password_digest,
cnonce,
iso_now)
class GoogleLoginAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
from urllib import urlencode
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
service = challenge['googlelogin'].get('service', 'xapi')
# Blogger actually returns the service in the challenge
# For the rest we guess based on the URI
if service == 'xapi' and request_uri.find("calendar") > 0:
service = "cl"
# No point in guessing Base or Spreadsheet
#elif request_uri.find("spreadsheets") > 0:
# service = "wise"
auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
lines = content.split('\n')
d = dict([tuple(line.split("=", 1)) for line in lines if line])
if resp.status == 403:
self.Auth = ""
else:
self.Auth = d['Auth']
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
AUTH_SCHEME_CLASSES = {
"basic": BasicAuthentication,
"wsse": WsseAuthentication,
"digest": DigestAuthentication,
"hmacdigest": HmacDigestAuthentication,
"googlelogin": GoogleLoginAuthentication
}
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
class FileCache(object):
"""Uses a local directory as a store for cached files.
Not really safe to use if multiple threads or processes are going to
be running on the same cache.
"""
def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self.cache = cache
self.safe = safe
if not os.path.exists(cache):
os.makedirs(self.cache)
def get(self, key):
retval = None
cacheFullPath = os.path.join(self.cache, self.safe(key))
try:
f = file(cacheFullPath, "rb")
retval = f.read()
f.close()
except IOError:
pass
return retval
def set(self, key, value):
cacheFullPath = os.path.join(self.cache, self.safe(key))
f = file(cacheFullPath, "wb")
f.write(value)
f.close()
def delete(self, key):
cacheFullPath = os.path.join(self.cache, self.safe(key))
if os.path.exists(cacheFullPath):
os.remove(cacheFullPath)
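# --- Illustrative sketch, not part of the original library: FileCache
# stores one file per key under the given directory, named via safename().
def _filecache_example():
    import tempfile
    cache = FileCache(tempfile.mkdtemp())
    cache.set("http://example.org/", "cached body")
    assert cache.get("http://example.org/") == "cached body"
    cache.delete("http://example.org/")
    assert cache.get("http://example.org/") is None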
class Credentials(object):
def __init__(self):
self.credentials = []
def add(self, name, password, domain=""):
self.credentials.append((domain.lower(), name, password))
def clear(self):
self.credentials = []
def iter(self, domain):
for (cdomain, name, password) in self.credentials:
if cdomain == "" or domain == cdomain:
yield (name, password)
class KeyCerts(Credentials):
"""Identical to Credentials except that
name/password are mapped to key/cert."""
pass
class AllHosts(object):
pass
class ProxyInfo(object):
"""Collect information required to use a proxy."""
bypass_hosts = ()
def __init__(self, proxy_type, proxy_host, proxy_port,
proxy_rdns=None, proxy_user=None, proxy_pass=None):
"""The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
constants. For example:
p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
proxy_host='localhost', proxy_port=8000)
"""
self.proxy_type = proxy_type
self.proxy_host = proxy_host
self.proxy_port = proxy_port
self.proxy_rdns = proxy_rdns
self.proxy_user = proxy_user
self.proxy_pass = proxy_pass
def astuple(self):
return (self.proxy_type, self.proxy_host, self.proxy_port,
self.proxy_rdns, self.proxy_user, self.proxy_pass)
def isgood(self):
return (self.proxy_host != None) and (self.proxy_port != None)
def applies_to(self, hostname):
return not self.bypass_host(hostname)
def bypass_host(self, hostname):
"""Has this host been excluded from the proxy config"""
if self.bypass_hosts is AllHosts:
return True
bypass = False
for domain in self.bypass_hosts:
if hostname.endswith(domain):
bypass = True
return bypass
def proxy_info_from_environment(method='http'):
"""
Read proxy info from the environment variables.
"""
if method not in ['http', 'https']:
return
env_var = method + '_proxy'
url = os.environ.get(env_var, os.environ.get(env_var.upper()))
if not url:
return
pi = proxy_info_from_url(url, method)
no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
bypass_hosts = []
if no_proxy:
bypass_hosts = no_proxy.split(',')
# special case, no_proxy=* means all hosts bypassed
if no_proxy == '*':
bypass_hosts = AllHosts
pi.bypass_hosts = bypass_hosts
return pi
def proxy_info_from_url(url, method='http'):
"""
Construct a ProxyInfo from a URL (such as http_proxy env var)
"""
url = urlparse.urlparse(url)
username = None
password = None
port = None
if '@' in url[1]:
ident, host_port = url[1].split('@', 1)
if ':' in ident:
username, password = ident.split(':', 1)
else:
password = ident
else:
host_port = url[1]
if ':' in host_port:
host, port = host_port.split(':', 1)
else:
host = host_port
if port:
port = int(port)
else:
port = dict(https=443, http=80)[method]
proxy_type = 3 # socks.PROXY_TYPE_HTTP
return ProxyInfo(
proxy_type = proxy_type,
proxy_host = host,
proxy_port = port,
proxy_user = username or None,
proxy_pass = password or None,
)
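# --- Illustrative sketch, not part of the original library: credentials,
# host and port are pulled apart from a proxy URL of the usual form, with
# the port defaulting per scheme when absent.
def _proxy_info_from_url_example():
    pi = proxy_info_from_url("http://joe:secret@localhost:3128")
    assert (pi.proxy_host, pi.proxy_port) == ("localhost", 3128)
    assert (pi.proxy_user, pi.proxy_pass) == ("joe", "secret")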
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
"""
HTTPConnection subclass that supports timeouts
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
httplib.HTTPConnection.__init__(self, host, port, strict)
self.timeout = timeout
self.proxy_info = proxy_info
def connect(self):
"""Connect to the host and port specified in __init__."""
# Mostly verbatim from httplib.py.
if self.proxy_info and socks is None:
raise ProxiesUnavailableError(
'Proxy support missing but proxy use was requested!')
msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
else:
use_proxy = False
if use_proxy and proxy_rdns:
host = proxy_host
port = proxy_port
else:
host = self.host
port = self.port
for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
try:
if use_proxy:
self.sock = socks.socksocket(af, socktype, proto)
self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
else:
self.sock = socket.socket(af, socktype, proto)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# Different from httplib: support timeouts.
if has_timeout(self.timeout):
self.sock.settimeout(self.timeout)
# End of difference from httplib.
if self.debuglevel > 0:
print "connect: (%s, %s) ************" % (self.host, self.port)
if use_proxy:
print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
self.sock.connect((self.host, self.port) + sa[2:])
except socket.error, msg:
if self.debuglevel > 0:
print "connect fail: (%s, %s)" % (self.host, self.port)
if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
raise socket.error, msg
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
"""
This class allows communication via SSL.
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None,
ca_certs=None, disable_ssl_certificate_validation=False):
httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file,
cert_file=cert_file, strict=strict)
self.timeout = timeout
self.proxy_info = proxy_info
if ca_certs is None:
ca_certs = CA_CERTS
self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
# The following two methods were adapted from https_wrapper.py, released
# with the Google Appengine SDK at
# http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
# under the following license:
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def _GetValidHostsForCert(self, cert):
"""Returns a list of valid host globs for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
Returns:
list: A list of valid host globs.
"""
if 'subjectAltName' in cert:
return [x[1] for x in cert['subjectAltName']
if x[0].lower() == 'dns']
else:
return [x[0][1] for x in cert['subject']
if x[0][0].lower() == 'commonname']
def _ValidateCertificateHostname(self, cert, hostname):
"""Validates that a given hostname is valid for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
hostname: The hostname to test.
Returns:
bool: Whether or not the hostname is valid for this certificate.
"""
hosts = self._GetValidHostsForCert(cert)
for host in hosts:
host_re = host.replace('.', '\.').replace('*', '[^.]*')
if re.search('^%s$' % (host_re,), hostname, re.I):
return True
return False
def connect(self):
"Connect to a host on a given (SSL) port."
msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
else:
use_proxy = False
if use_proxy and proxy_rdns:
host = proxy_host
port = proxy_port
else:
host = self.host
port = self.port
for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo(
host, port, 0, socket.SOCK_STREAM):
try:
if use_proxy:
sock = socks.socksocket(family, socktype, proto)
sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
else:
sock = socket.socket(family, socktype, proto)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if has_timeout(self.timeout):
sock.settimeout(self.timeout)
sock.connect((self.host, self.port))
self.sock = _ssl_wrap_socket(
sock, self.key_file, self.cert_file,
self.disable_ssl_certificate_validation, self.ca_certs)
if self.debuglevel > 0:
print "connect: (%s, %s)" % (self.host, self.port)
if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
if not self.disable_ssl_certificate_validation:
cert = self.sock.getpeercert()
# Match the certificate against the bare host name, without any port.
hostname = self.host.split(':', 1)[0]
if not self._ValidateCertificateHostname(cert, hostname):
raise CertificateHostnameMismatch(
'Server presented certificate that does not match '
'host %s: %s' % (hostname, cert), hostname, cert)
except ssl_SSLError, e:
if sock:
sock.close()
if self.sock:
self.sock.close()
self.sock = None
# Unfortunately the ssl module doesn't seem to provide any way
# to get at more detailed error information, in particular
# whether the error is due to certificate validation or
# something else (such as SSL protocol mismatch).
if e.errno == ssl.SSL_ERROR_SSL:
raise SSLHandshakeError(e)
else:
raise
except (socket.timeout, socket.gaierror):
raise
except socket.error, msg:
if self.debuglevel > 0:
print "connect fail: (%s, %s)" % (self.host, self.port)
if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
raise socket.error, msg
SCHEME_TO_CONNECTION = {
'http': HTTPConnectionWithTimeout,
'https': HTTPSConnectionWithTimeout
}
# Use a different connection object for Google App Engine
try:
from google.appengine.api import apiproxy_stub_map
if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
raise ImportError # Bail out; we're not actually running on App Engine.
from google.appengine.api.urlfetch import fetch
from google.appengine.api.urlfetch import InvalidURLError
class ResponseDict(dict):
"""Dictionary with a read() method; can pass off as httplib.HTTPResponse."""
def __init__(self, *args, **kwargs):
self.content = kwargs.pop('content', None)
return super(ResponseDict, self).__init__(*args, **kwargs)
def read(self):
return self.content
class AppEngineHttpConnection(object):
"""Emulates an httplib.HTTPConnection object, but actually uses the Google
App Engine urlfetch library. This allows the timeout to be properly used on
Google App Engine, and avoids using httplib, which on Google App Engine is
just another wrapper around urlfetch.
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False):
self.host = host
self.port = port
self.timeout = timeout
if key_file or cert_file or proxy_info or ca_certs:
raise NotSupportedOnThisPlatform()
self.response = None
self.scheme = 'http'
self.validate_certificate = not disable_ssl_certificate_validation
self.sock = True
def request(self, method, url, body, headers):
# Calculate the absolute URI, which fetch requires
netloc = self.host
if self.port:
netloc = '%s:%s' % (self.host, self.port)
absolute_uri = '%s://%s%s' % (self.scheme, netloc, url)
try:
try: # 'body' can be a stream.
body = body.read()
except AttributeError:
pass
response = fetch(absolute_uri, payload=body, method=method,
headers=headers, allow_truncated=False, follow_redirects=False,
deadline=self.timeout,
validate_certificate=self.validate_certificate)
self.response = ResponseDict(response.headers, content=response.content)
self.response['status'] = str(response.status_code)
self.response['reason'] = httplib.responses.get(response.status_code, 'Ok')
self.response.status = response.status_code
# Make sure the exceptions raised match the exceptions expected.
except InvalidURLError:
raise socket.gaierror('')
def getresponse(self):
if self.response:
return self.response
else:
raise httplib.HTTPException()
def set_debuglevel(self, level):
pass
def connect(self):
pass
def close(self):
pass
class AppEngineHttpsConnection(AppEngineHttpConnection):
"""Same as AppEngineHttpConnection, but for HTTPS URIs."""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False):
AppEngineHttpConnection.__init__(self, host, port, key_file, cert_file,
strict, timeout, proxy_info, ca_certs, disable_ssl_certificate_validation)
self.scheme = 'https'
# Update the connection classes to use the Google App Engine-specific ones.
SCHEME_TO_CONNECTION = {
'http': AppEngineHttpConnection,
'https': AppEngineHttpsConnection
}
except ImportError:
pass
class Http(object):
"""An HTTP client that handles:
- all methods
- caching
- ETags
- compression,
- HTTPS
- Basic
- Digest
- WSSE
and more.
"""
def __init__(self, cache=None, timeout=None,
proxy_info=proxy_info_from_environment,
ca_certs=None, disable_ssl_certificate_validation=False):
"""If 'cache' is a string then it is used as a directory name for
a disk cache. Otherwise it must be an object that supports the
same interface as FileCache.
All timeouts are in seconds. If None is passed for timeout
then Python's default timeout for sockets will be used. See
for example the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
`proxy_info` may be:
- a callable that takes the http scheme ('http' or 'https') and
returns a ProxyInfo instance per request. By default, uses
proxy_info_from_environment.
- a ProxyInfo instance (static proxy config).
- None (proxy disabled).
ca_certs is the path of a file containing root CA certificates for SSL
server certificate validation. By default, a CA cert file bundled with
httplib2 is used.
If disable_ssl_certificate_validation is true, SSL cert validation will
not be performed.
"""
self.proxy_info = proxy_info
self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
# Map domain name to an httplib connection
self.connections = {}
# The location of the cache, for now a directory
# where cached responses are held.
if cache and isinstance(cache, basestring):
self.cache = FileCache(cache)
else:
self.cache = cache
# Name/password
self.credentials = Credentials()
# Key/cert
self.certificates = KeyCerts()
# authorization objects
self.authorizations = []
# If set to False then no redirects are followed, even safe ones.
self.follow_redirects = True
# Which HTTP methods do we apply optimistic concurrency to, i.e.
# which methods get an "if-match:" etag header added to them.
self.optimistic_concurrency_methods = ["PUT", "PATCH"]
# If 'follow_redirects' is True, and this is set to True then
# all redirects are followed, including unsafe ones.
self.follow_all_redirects = False
self.ignore_etag = False
self.force_exception_to_status_code = False
self.timeout = timeout
# Keep Authorization: headers on a redirect.
self.forward_authorization_headers = False
def __getstate__(self):
state_dict = copy.copy(self.__dict__)
# In case request is augmented by some foreign object such as
# credentials which handle auth
if 'request' in state_dict:
del state_dict['request']
if 'connections' in state_dict:
del state_dict['connections']
return state_dict
def __setstate__(self, state):
self.__dict__.update(state)
self.connections = {}
def _auth_from_challenge(self, host, request_uri, headers, response, content):
"""A generator that creates Authorization objects
that can be applied to requests.
"""
challenges = _parse_www_authenticate(response, 'www-authenticate')
for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER:
if challenges.has_key(scheme):
yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
def add_credentials(self, name, password, domain=""):
"""Add a name and password that will be used
any time a request requires authentication."""
self.credentials.add(name, password, domain)
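# For example (sketch; names are placeholders): registering credentials
# up front lets a later 401 challenge be answered automatically.
#
#   h.add_credentials("joe", "s3cret", "example.org")
#   (resp, content) = h.request("https://example.org/protected")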
def add_certificate(self, key, cert, domain):
"""Add a key and cert that will be used
any time a request requires authentication."""
self.certificates.add(key, cert, domain)
def clear_credentials(self):
"""Remove all the names and passwords
that are used for authentication"""
self.credentials.clear()
self.authorizations = []
def _conn_request(self, conn, request_uri, method, body, headers):
for i in range(RETRIES):
try:
if conn.sock is None:
conn.connect()
conn.request(method, request_uri, body, headers)
except socket.timeout:
raise
except socket.gaierror:
conn.close()
raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
except ssl_SSLError:
conn.close()
raise
except socket.error, e:
err = 0
if hasattr(e, 'args'):
err = getattr(e, 'args')[0]
else:
err = e.errno
if err == errno.ECONNREFUSED: # Connection refused
raise
except httplib.HTTPException:
# Just because the server closed the connection doesn't apparently mean
# that the server didn't send a response.
if conn.sock is None:
if i < RETRIES-1:
conn.close()
conn.connect()
continue
else:
conn.close()
raise
if i < RETRIES-1:
conn.close()
conn.connect()
continue
try:
response = conn.getresponse()
except (socket.error, httplib.HTTPException):
if i < RETRIES-1:
conn.close()
conn.connect()
continue
else:
raise
else:
content = ""
if method == "HEAD":
conn.close()
else:
content = response.read()
response = Response(response)
if method != "HEAD":
content = _decompressContent(response, content)
break
return (response, content)
def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
"""Do the actual request using the connection object
and also follow one level of redirects if necessary"""
auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
auth = auths and sorted(auths)[0][1] or None
if auth:
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
if auth:
if auth.response(response, body):
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers )
response._stale_digest = 1
if response.status == 401:
for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
authorization.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers, )
if response.status != 401:
self.authorizations.append(authorization)
authorization.response(response, body)
break
if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
if redirections:
if not response.has_key('location') and response.status != 300:
raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
# Fix-up relative redirects (which violate an RFC 2616 MUST)
if response.has_key('location'):
location = response['location']
(scheme, authority, path, query, fragment) = parse_uri(location)
if authority == None:
response['location'] = urlparse.urljoin(absolute_uri, location)
if response.status == 301 and method in ["GET", "HEAD"]:
response['-x-permanent-redirect-url'] = response['location']
if not response.has_key('content-location'):
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
if headers.has_key('if-none-match'):
del headers['if-none-match']
if headers.has_key('if-modified-since'):
del headers['if-modified-since']
if 'authorization' in headers and not self.forward_authorization_headers:
del headers['authorization']
if response.has_key('location'):
location = response['location']
old_response = copy.deepcopy(response)
if not old_response.has_key('content-location'):
old_response['content-location'] = absolute_uri
redirect_method = method
if response.status in [302, 303]:
redirect_method = "GET"
body = None
(response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
response.previous = old_response
else:
raise RedirectLimit("Redirected more times than redirection_limit allows.", response, content)
elif response.status in [200, 203] and method in ["GET", "HEAD"]:
# Don't cache 206's since we aren't going to handle byte range requests
if not response.has_key('content-location'):
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
return (response, content)
def _normalize_headers(self, headers):
return _normalize_headers(headers)
# Need to catch and rebrand some exceptions
# Then need to optionally turn all exceptions into status codes
# including all socket.* and httplib.* exceptions.
def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
""" Performs a single HTTP request.
The 'uri' is the URI of the HTTP resource and can begin
with either 'http' or 'https'. The value of 'uri' must be an absolute URI.
The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.
There is no restriction on the methods allowed.
The 'body' is the entity body to be sent with the request. It is a string
object.
Any extra headers that are to be sent with the request should be provided in the
'headers' dictionary.
The maximum number of redirects to follow before raising an
exception is 'redirections'. The default is 5.
The return value is a tuple of (response, content), the first
being an instance of the 'Response' class, the second being
a string that contains the response entity body.
"""
try:
if headers is None:
headers = {}
else:
headers = self._normalize_headers(headers)
if not headers.has_key('user-agent'):
headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri)
(scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
domain_port = authority.split(":")[0:2]
if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
scheme = 'https'
authority = domain_port[0]
proxy_info = self._get_proxy_info(scheme, authority)
conn_key = scheme+":"+authority
if conn_key in self.connections:
conn = self.connections[conn_key]
else:
if not connection_type:
connection_type = SCHEME_TO_CONNECTION[scheme]
certs = list(self.certificates.iter(authority))
if scheme == 'https':
if certs:
conn = self.connections[conn_key] = connection_type(
authority, key_file=certs[0][0],
cert_file=certs[0][1], timeout=self.timeout,
proxy_info=proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=proxy_info)
conn.set_debuglevel(debuglevel)
if 'range' not in headers and 'accept-encoding' not in headers:
headers['accept-encoding'] = 'gzip, deflate'
info = email.Message.Message()
cached_value = None
if self.cache:
cachekey = defrag_uri
cached_value = self.cache.get(cachekey)
if cached_value:
# info = email.message_from_string(cached_value)
#
# Need to replace the line above with the kludge below
# to fix the non-existent bug not fixed in this
# bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
try:
info, content = cached_value.split('\r\n\r\n', 1)
feedparser = email.FeedParser.FeedParser()
feedparser.feed(info)
info = feedparser.close()
feedparser._parse = None
except (IndexError, ValueError):
self.cache.delete(cachekey)
cachekey = None
cached_value = None
else:
cachekey = None
if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
# http://www.w3.org/1999/04/Editing/
headers['if-match'] = info['etag']
if method not in ["GET", "HEAD"] and self.cache and cachekey:
# RFC 2616 Section 13.10
self.cache.delete(cachekey)
# Check the vary header in the cache to see if this request
# matches what varies in the cache.
if method in ['GET', 'HEAD'] and 'vary' in info:
vary = info['vary']
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
value = info[key]
if headers.get(header, None) != value:
cached_value = None
break
if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
if info.has_key('-x-permanent-redirect-url'):
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0:
raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "")
(response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1)
response.previous = Response(info)
response.previous.fromcache = True
else:
# Determine our course of action:
# Is the cached entry fresh or stale?
# Has the client requested a non-cached response?
#
# There seem to be three possible answers:
# 1. [FRESH] Return the cache entry w/o doing a GET
# 2. [STALE] Do the GET (but add in cache validators if available)
# 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
entry_disposition = _entry_disposition(info, headers)
if entry_disposition == "FRESH":
if not cached_value:
info['status'] = '504'
content = ""
response = Response(info)
if cached_value:
response.fromcache = True
return (response, content)
if entry_disposition == "STALE":
if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
headers['if-none-match'] = info['etag']
if info.has_key('last-modified') and not 'last-modified' in headers:
headers['if-modified-since'] = info['last-modified']
elif entry_disposition == "TRANSPARENT":
pass
(response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
if response.status == 304 and method == "GET":
# Rewrite the cache entry with the new end-to-end headers
# Take all headers that are in response
# and overwrite their values in info,
# unless they are hop-by-hop or are listed in the connection header.
for key in _get_end2end_headers(response):
info[key] = response[key]
merged_response = Response(info)
if hasattr(response, "_stale_digest"):
merged_response._stale_digest = response._stale_digest
_updateCache(headers, merged_response, content, self.cache, cachekey)
response = merged_response
response.status = 200
response.fromcache = True
elif response.status == 200:
content = new_content
else:
self.cache.delete(cachekey)
content = new_content
else:
cc = _parse_cache_control(headers)
if cc.has_key('only-if-cached'):
info['status'] = '504'
response = Response(info)
content = ""
else:
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
except Exception, e:
if self.force_exception_to_status_code:
if isinstance(e, HttpLib2ErrorWithResponse):
response = e.response
content = e.content
response.status = 500
response.reason = str(e)
elif isinstance(e, socket.timeout):
content = "Request Timeout"
response = Response( {
"content-type": "text/plain",
"status": "408",
"content-length": len(content)
})
response.reason = "Request Timeout"
else:
content = str(e)
response = Response( {
"content-type": "text/plain",
"status": "400",
"content-length": len(content)
})
response.reason = "Bad Request"
else:
raise
return (response, content)
def _get_proxy_info(self, scheme, authority):
"""Return a ProxyInfo instance (or None) based on the scheme
and authority.
"""
hostname, port = urllib.splitport(authority)
proxy_info = self.proxy_info
if callable(proxy_info):
proxy_info = proxy_info(scheme)
if (hasattr(proxy_info, 'applies_to')
and not proxy_info.applies_to(hostname)):
proxy_info = None
return proxy_info
class Response(dict):
"""An object more like email.Message than httplib.HTTPResponse."""
"""Is this response from our local cache"""
fromcache = False
"""HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
version = 11
"Status code returned by server. "
status = 200
"""Reason phrase returned by server."""
reason = "Ok"
previous = None
def __init__(self, info):
# info is either an email.Message or
# an httplib.HTTPResponse object.
if isinstance(info, httplib.HTTPResponse):
for key, value in info.getheaders():
self[key.lower()] = value
self.status = info.status
self['status'] = str(self.status)
self.reason = info.reason
self.version = info.version
elif isinstance(info, email.Message.Message):
for key, value in info.items():
self[key.lower()] = value
self.status = int(self['status'])
else:
for key, value in info.iteritems():
self[key.lower()] = value
self.status = int(self.get('status', self.status))
self.reason = self.get('reason', self.reason)
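# e.g. (sketch): a Response acts as a dict of lower-cased headers with
# status/reason attributes layered on top:
#
#   r = Response({'status': '404', 'content-type': 'text/html'})
#   assert r.status == 404 and r['content-type'] == 'text/html'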
def __getattr__(self, name):
if name == 'dict':
return self
else:
raise AttributeError, name
| AnkiServer | /AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/thirdparty/httplib2/__init__.py | __init__.py |
"""SocksiPy - Python SOCKS module.
Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
for use in PyLoris (http://pyloris.sourceforge.net/)
Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
mainly to merge bug fixes found in Sourceforge
"""
import base64
import socket
import struct
import sys
if getattr(socket, 'socket', None) is None:
raise ImportError('socket.socket missing, proxy support unusable')
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
PROXY_TYPE_HTTP = 3
PROXY_TYPE_HTTP_NO_TUNNEL = 4
_defaultproxy = None
_orgsocket = socket.socket
class ProxyError(Exception): pass
class GeneralProxyError(ProxyError): pass
class Socks5AuthError(ProxyError): pass
class Socks5Error(ProxyError): pass
class Socks4Error(ProxyError): pass
class HTTPError(ProxyError): pass
_generalerrors = ("success",
"invalid data",
"not connected",
"not available",
"bad proxy type",
"bad input")
_socks5errors = ("succeeded",
"general SOCKS server failure",
"connection not allowed by ruleset",
"Network unreachable",
"Host unreachable",
"Connection refused",
"TTL expired",
"Command not supported",
"Address type not supported",
"Unknown error")
_socks5autherrors = ("succeeded",
"authentication is required",
"all offered authentication methods were rejected",
"unknown username or invalid password",
"unknown error")
_socks4errors = ("request granted",
"request rejected or failed",
"request rejected because SOCKS server cannot connect to identd on the client",
"request rejected because the client program and identd report different user-ids",
"unknown error")
def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets a default proxy which all further socksocket objects will use,
unless explicitly changed.
"""
global _defaultproxy
_defaultproxy = (proxytype, addr, port, rdns, username, password)
def wrapmodule(module):
"""wrapmodule(module)
Attempts to replace a module's socket library with a SOCKS socket. Must set
a default proxy using setdefaultproxy(...) first.
This will only work on modules that import socket directly into the namespace;
most of the Python Standard Library falls into this category.
"""
if _defaultproxy != None:
module.socket.socket = socksocket
else:
raise GeneralProxyError((4, "no proxy specified"))
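# Monkey-patching sketch (assumes a SOCKS5 proxy on localhost:1080; the
# module name "socks" and the use of urllib2 are illustrative):
#
#   import socks, urllib2
#   socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
#   socks.wrapmodule(urllib2)
#   urllib2.urlopen("http://example.org/")  # now tunnels via the proxy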
class socksocket(socket.socket):
"""socksocket([family[, type[, proto]]]) -> socket object
Open a SOCKS enabled socket. The parameters are the same as
those of the standard socket init. In order for SOCKS to work,
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
"""
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
_orgsocket.__init__(self, family, type, proto, _sock)
if _defaultproxy != None:
self.__proxy = _defaultproxy
else:
self.__proxy = (None, None, None, None, None, None)
self.__proxysockname = None
self.__proxypeername = None
self.__httptunnel = True
def __recvall(self, count):
"""__recvall(count) -> data
Receive EXACTLY the number of bytes requested from the socket.
Blocks until the required number of bytes have been received.
"""
data = self.recv(count)
while len(data) < count:
d = self.recv(count-len(data))
if not d: raise GeneralProxyError((0, "connection closed unexpectedly"))
data = data + d
return data
def sendall(self, content, *args):
""" override socket.socket.sendall method to rewrite the header
for non-tunneling proxies if needed
"""
if not self.__httptunnel:
content = self.__rewriteproxy(content)
return super(socksocket, self).sendall(content, *args)
def __rewriteproxy(self, header):
""" rewrite HTTP request headers to support non-tunneling proxies
(i.e. those which do not support the CONNECT method).
This only works for HTTP (not HTTPS) since HTTPS requires tunneling.
"""
host, endpt = None, None
hdrs = header.split("\r\n")
for hdr in hdrs:
if hdr.lower().startswith("host:"):
host = hdr
elif hdr.lower().startswith("get") or hdr.lower().startswith("post"):
endpt = hdr
if host and endpt:
hdrs.remove(host)
hdrs.remove(endpt)
host = host.split(" ")[1]
endpt = endpt.split(" ")
if (self.__proxy[4] != None and self.__proxy[5] != None):
hdrs.insert(0, self.__getauthheader())
hdrs.insert(0, "Host: %s" % host)
hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
return "\r\n".join(hdrs)
def __getauthheader(self):
auth = self.__proxy[4] + ":" + self.__proxy[5]
return "Proxy-Authorization: Basic " + base64.b64encode(auth)
def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets the proxy to be used.
proxytype - The type of the proxy to be used. Three types
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
addr - The address of the server (IP or DNS).
port - The port of the server. Defaults to 1080 for SOCKS
servers and 8080 for HTTP proxy servers.
rdns - Should DNS queries be performed on the remote side
(rather than the local side). The default is True.
Note: This has no effect with SOCKS4 servers.
username - Username to authenticate with to the server.
The default is no authentication.
password - Password to authenticate with to the server.
Only relevant when username is also provided.
"""
self.__proxy = (proxytype, addr, port, rdns, username, password)
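# Direct-use sketch (host names and credentials are placeholders):
#
#   s = socksocket()
#   s.setproxy(PROXY_TYPE_SOCKS5, "proxy.example.org", 1080,
#              username="user", password="pass")
#   s.connect(("irc.example.net", 6667))   # negotiated through the proxy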
def __negotiatesocks5(self, destaddr, destport):
"""__negotiatesocks5(self,destaddr,destport)
Negotiates a connection through a SOCKS5 server.
"""
# First we'll send the authentication packages we support.
if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
# The username/password details were supplied to the
# setproxy method so we support the USERNAME/PASSWORD
# authentication (in addition to the standard none).
self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02))
else:
# No username/password were entered, therefore we
# only support connections with no authentication.
self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00))
# We'll receive the server's response to determine which
# method was selected
chosenauth = self.__recvall(2)
if chosenauth[0:1] != chr(0x05).encode():
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
# Check the chosen authentication method
if chosenauth[1:2] == chr(0x00).encode():
# No authentication is required
pass
elif chosenauth[1:2] == chr(0x02).encode():
# Okay, we need to perform a basic username/password
# authentication.
self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
authstat = self.__recvall(2)
if authstat[0:1] != chr(0x01).encode():
# Bad response
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if authstat[1:2] != chr(0x00).encode():
# Authentication failed
self.close()
raise Socks5AuthError((3, _socks5autherrors[3]))
# Authentication succeeded
else:
# Reaching here is always bad
self.close()
if chosenauth[1:2] == chr(0xFF):
raise Socks5AuthError((2, _socks5autherrors[2]))
else:
raise GeneralProxyError((1, _generalerrors[1]))
# Now we can request the actual connection
req = struct.pack('BBB', 0x05, 0x01, 0x00)
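# (Wire layout per RFC 1928: VER=0x05, CMD=0x01 CONNECT, RSV=0x00; the
# address-type byte plus destination address and port are appended below.)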
# If the given destination address is an IP address, we'll
# use the IPv4 address request even if remote resolving was specified.
try:
ipaddr = socket.inet_aton(destaddr)
req = req + chr(0x01).encode() + ipaddr
except socket.error:
# Well it's not an IP number, so it's probably a DNS name.
if self.__proxy[3]:
# Resolve remotely
ipaddr = None
req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr
else:
# Resolve locally
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
req = req + chr(0x01).encode() + ipaddr
req = req + struct.pack(">H", destport)
self.sendall(req)
# Get the response
resp = self.__recvall(4)
if resp[0:1] != chr(0x05).encode():
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
elif resp[1:2] != chr(0x00).encode():
# Connection failed
self.close()
if ord(resp[1:2])<=8:
raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
else:
raise Socks5Error((9, _socks5errors[9]))
# Get the bound address/port
elif resp[3:4] == chr(0x01).encode():
boundaddr = self.__recvall(4)
elif resp[3:4] == chr(0x03).encode():
resp = resp + self.recv(1)
boundaddr = self.__recvall(ord(resp[4:5]))
else:
self.close()
raise GeneralProxyError((1,_generalerrors[1]))
boundport = struct.unpack(">H", self.__recvall(2))[0]
self.__proxysockname = (boundaddr, boundport)
if ipaddr != None:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def getproxysockname(self):
"""getsockname() -> address info
Returns the bound IP address and port number at the proxy.
"""
return self.__proxysockname
def getproxypeername(self):
"""getproxypeername() -> address info
Returns the IP and port number of the proxy.
"""
return _orgsocket.getpeername(self)
def getpeername(self):
"""getpeername() -> address info
Returns the IP address and port number of the destination
machine (note: getproxypeername returns the proxy)
"""
return self.__proxypeername
def __negotiatesocks4(self,destaddr,destport):
"""__negotiatesocks4(self,destaddr,destport)
Negotiates a connection through a SOCKS4 server.
"""
# Check if the destination address provided is an IP address
rmtrslv = False
try:
ipaddr = socket.inet_aton(destaddr)
except socket.error:
# It's a DNS name. Check where it should be resolved.
if self.__proxy[3]:
ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
rmtrslv = True
else:
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
# Construct the request packet
req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
# The username parameter is considered userid for SOCKS4
if self.__proxy[4] != None:
req = req + self.__proxy[4]
req = req + chr(0x00).encode()
# DNS name if remote resolving is required
# NOTE: This is actually an extension to the SOCKS4 protocol
# called SOCKS4A and may not be supported in all cases.
if rmtrslv:
req = req + destaddr + chr(0x00).encode()
self.sendall(req)
# Get the response from the server
resp = self.__recvall(8)
if resp[0:1] != chr(0x00).encode():
# Bad data
self.close()
raise GeneralProxyError((1,_generalerrors[1]))
if resp[1:2] != chr(0x5A).encode():
# Server returned an error
self.close()
if ord(resp[1:2]) in (91, 92, 93):
self.close()
raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
else:
raise Socks4Error((94, _socks4errors[4]))
# Get the bound address/port
self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
# we only know the real peer IP when the name was resolved locally
if not rmtrslv:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
self.__proxypeername = (destaddr, destport)
def __negotiatehttp(self, destaddr, destport):
"""__negotiatehttp(self,destaddr,destport)
Negotiates a connection through an HTTP server.
"""
# If we need to resolve locally, we do this now
if not self.__proxy[3]:
addr = socket.gethostbyname(destaddr)
else:
addr = destaddr
headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
headers += ["Host: ", destaddr, "\r\n"]
if (self.__proxy[4] != None and self.__proxy[5] != None):
headers += [self.__getauthheader(), "\r\n"]
headers.append("\r\n")
self.sendall("".join(headers).encode())
# We read the response until we get the string "\r\n\r\n"
resp = self.recv(1)
while resp.find("\r\n\r\n".encode()) == -1:
resp = resp + self.recv(1)
# We just need the first line to check if the connection
# was successful
statusline = resp.splitlines()[0].split(" ".encode(), 2)
if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
try:
statuscode = int(statusline[1])
except ValueError:
self.close()
raise GeneralProxyError((1, _generalerrors[1]))
if statuscode != 200:
self.close()
raise HTTPError((statuscode, statusline[2]))
self.__proxysockname = ("0.0.0.0", 0)
self.__proxypeername = (addr, destport)
def connect(self, destpair):
"""connect(self, despair)
Connects to the specified destination through a proxy.
destpar - A tuple of the IP/DNS address and the port number.
(identical to socket's connect).
To select the proxy server use setproxy().
"""
# Do a minimal input check first
if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int):
raise GeneralProxyError((5, _generalerrors[5]))
if self.__proxy[0] == PROXY_TYPE_SOCKS5:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatesocks5(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 1080
_orgsocket.connect(self,(self.__proxy[1], portnum))
self.__negotiatesocks4(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 8080
_orgsocket.connect(self,(self.__proxy[1], portnum))
self.__negotiatehttp(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 8080
_orgsocket.connect(self,(self.__proxy[1],portnum))
if destpair[1] == 443:
self.__negotiatehttp(destpair[0],destpair[1])
else:
self.__httptunnel = False
elif self.__proxy[0] == None:
_orgsocket.connect(self, (destpair[0], destpair[1]))
else:
raise GeneralProxyError((4, _generalerrors[4]))
| AnkiServer | /AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/thirdparty/httplib2/socks.py | socks.py |
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
# This is a reimplementation of plat_other.py with reference to the
# freedesktop.org trash specification:
# [1] http://www.freedesktop.org/wiki/Specifications/trash-spec
# [2] http://www.ramendik.ru/docs/trashspec.html
# See also:
# [3] http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
#
# For external volumes this implementation will raise an exception if it can't
# find or create the user's trash directory.
import sys
import os
import os.path as op
from datetime import datetime
import stat
from urllib import quote
from io import open
FILES_DIR = u'files'
INFO_DIR = u'info'
INFO_SUFFIX = u'.trashinfo'
# Default of ~/.local/share [3]
XDG_DATA_HOME = op.expanduser(os.environ.get(u'XDG_DATA_HOME', u'~/.local/share'))
HOMETRASH = op.join(XDG_DATA_HOME, u'Trash')
uid = os.getuid()
TOPDIR_TRASH = u'.Trash'
TOPDIR_FALLBACK = u'.Trash-' + unicode(uid)
def is_parent(parent, path):
path = op.realpath(path) # In case it's a symlink
parent = op.realpath(parent)
return path.startswith(parent)
def format_date(date):
return date.strftime(u"%Y-%m-%dT%H:%M:%S")
def info_for(src, topdir):
# ...it MUST not include a ".." directory, and for files not "under" that
# directory, absolute pathnames must be used. [2]
if topdir is None or not is_parent(topdir, src):
src = op.abspath(src)
else:
src = op.relpath(src, topdir)
info = u"[Trash Info]\n"
if isinstance(src, unicode):
src = src.encode("utf8")
info += u"Path=" + quote(src) + u"\n"
info += u"DeletionDate=" + format_date(datetime.now()) + u"\n"
return info
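# The resulting .trashinfo payload looks roughly like this (illustrative
# path and timestamp):
#
#   [Trash Info]
#   Path=/home/user/docs/report.txt
#   DeletionDate=2014-01-31T12:00:00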
def check_create(dir):
# use 0700 for paths [3]
if not op.exists(dir):
os.makedirs(dir, 0700)
def trash_move(src, dst, topdir=None):
filename = op.basename(src)
filespath = op.join(dst, FILES_DIR)
infopath = op.join(dst, INFO_DIR)
base_name, ext = op.splitext(filename)
counter = 0
destname = filename
while op.exists(op.join(filespath, destname)) or op.exists(op.join(infopath, destname + INFO_SUFFIX)):
counter += 1
destname = u'%s %s%s' % (base_name, counter, ext)
check_create(filespath)
check_create(infopath)
os.rename(src, op.join(filespath, destname))
f = open(op.join(infopath, destname + INFO_SUFFIX), u'w')
f.write(info_for(src, topdir))
f.close()
def find_mount_point(path):
# Even if something's wrong, "/" is a mount point, so the loop will exit.
# Use realpath in case it's a symlink
path = op.realpath(path) # Required to avoid infinite loop
while not op.ismount(path):
path = op.split(path)[0]
return path
def find_ext_volume_global_trash(volume_root):
# from [2] Trash directories (1) check for a .Trash dir with the right
# permissions set.
trash_dir = op.join(volume_root, TOPDIR_TRASH)
if not op.exists(trash_dir):
return None
mode = os.lstat(trash_dir).st_mode
# vol/.Trash must be a directory, cannot be a symlink, and must have the
# sticky bit set.
if not op.isdir(trash_dir) or op.islink(trash_dir) or not (mode & stat.S_ISVTX):
return None
trash_dir = op.join(trash_dir, unicode(uid))
try:
check_create(trash_dir)
except OSError:
return None
return trash_dir
def find_ext_volume_fallback_trash(volume_root):
# from [2] Trash directories (1) create a .Trash-$uid dir.
trash_dir = op.join(volume_root, TOPDIR_FALLBACK)
# Try to make the directory; if we can't, the OSError exception will
# escape and be raised out of send2trash.
check_create(trash_dir)
return trash_dir
def find_ext_volume_trash(volume_root):
trash_dir = find_ext_volume_global_trash(volume_root)
if trash_dir is None:
trash_dir = find_ext_volume_fallback_trash(volume_root)
return trash_dir
# Pull this out so it's easy to stub (to avoid stubbing lstat itself)
def get_dev(path):
return os.lstat(path).st_dev
def send2trash(path):
try:
_send2trash(path)
except OSError:
# user's system is broken; just delete
os.unlink(path)
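# Typical call (sketch; the path is an example):
#
#   send2trash(u"/home/user/old-notes.txt")
#
# moves the file into Trash/files and writes a matching
# Trash/info/old-notes.txt.trashinfo entry; the file is deleted outright
# only if trashing raises OSError.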
def _send2trash(path):
if not isinstance(path, unicode):
path = unicode(path, sys.getfilesystemencoding())
if not op.exists(path):
raise OSError(u"File not found: %s" % path)
# ...should check whether the user has the necessary permissions to delete
# it, before starting the trashing operation itself. [2]
if not os.access(path, os.W_OK):
raise OSError(u"Permission denied: %s" % path)
# if the file to be trashed is on the same device as HOMETRASH we
# want to move it there.
path_dev = get_dev(path)
# If XDG_DATA_HOME or HOMETRASH do not yet exist we need to stat the
# home directory, and these paths will be created further on if needed.
trash_dev = get_dev(op.expanduser(u'~'))
if path_dev == trash_dev:
topdir = XDG_DATA_HOME
dest_trash = HOMETRASH
else:
topdir = find_mount_point(path)
trash_dev = get_dev(topdir)
if trash_dev != path_dev:
raise OSError(u"Couldn't find mount point for %s" % path)
dest_trash = find_ext_volume_trash(topdir)
trash_move(path, dest_trash, topdir)
| AnkiServer | /AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/thirdparty/send2trash/plat_other.py | plat_other.py |
from anki.utils import intTime, ids2str, json
from anki.hooks import runHook
import re
"""
Anki maintains a cache of used tags so it can quickly present a list of tags
for autocomplete and in the browser. For efficiency, deletions are not
tracked, so unused tags can only be removed from the list with a DB check.
This module manages the tag cache and tags for notes.
"""
class TagManager(object):
# Registry save/load
#############################################################
def __init__(self, col):
self.col = col
def load(self, json_):
self.tags = json.loads(json_)
self.changed = False
def flush(self):
if self.changed:
self.col.db.execute("update col set tags=?",
json.dumps(self.tags))
self.changed = False
# Registering and fetching tags
#############################################################
def register(self, tags, usn=None):
"Given a list of tags, add any missing ones to tag registry."
found = False
for t in tags:
if t not in self.tags:
found = True
self.tags[t] = self.col.usn() if usn is None else usn
self.changed = True
if found:
runHook("newTag")
def all(self):
return self.tags.keys()
def registerNotes(self, nids=None):
"Add any missing tags from notes to the tags list."
# when called without an argument, the old list is cleared first.
if nids:
lim = " where id in " + ids2str(nids)
else:
lim = ""
self.tags = {}
self.changed = True
self.register(set(self.split(
" ".join(self.col.db.list("select distinct tags from notes"+lim)))))
def allItems(self):
return self.tags.items()
def save(self):
self.changed = True
# Bulk addition/removal from notes
#############################################################
def bulkAdd(self, ids, tags, add=True):
"Add tags in bulk. TAGS is space-separated."
newTags = self.split(tags)
if not newTags:
return
# cache tag names
self.register(newTags)
# find notes missing the tags
if add:
l = "tags not "
fn = self.addToStr
else:
l = "tags "
fn = self.remFromStr
lim = " or ".join(
[l+"like :_%d" % c for c, t in enumerate(newTags)])
res = self.col.db.all(
"select id, tags from notes where id in %s and (%s)" % (
ids2str(ids), lim),
**dict([("_%d" % x, '%% %s %%' % y)
for x, y in enumerate(newTags)]))
# update tags
nids = []
def fix(row):
nids.append(row[0])
return {'id': row[0], 't': fn(tags, row[1]), 'n':intTime(),
'u':self.col.usn()}
self.col.db.executemany(
"update notes set tags=:t,mod=:n,usn=:u where id = :id",
[fix(row) for row in res])
def bulkRem(self, ids, tags):
self.bulkAdd(ids, tags, False)
# String-based utilities
##########################################################################
def split(self, tags):
"Parse a string and return a list of tags."
return [t for t in tags.replace(u'\u3000', ' ').split(" ") if t]
def join(self, tags):
"Join tags into a single string, with leading and trailing spaces."
if not tags:
return u""
return u" %s " % u" ".join(tags)
def addToStr(self, addtags, tags):
"Add tags if they don't exist, and canonify."
currentTags = self.split(tags)
for tag in self.split(addtags):
if not self.inList(tag, currentTags):
currentTags.append(tag)
return self.join(self.canonify(currentTags))
def remFromStr(self, deltags, tags):
"Delete tags if they don't exists."
currentTags = self.split(tags)
for tag in self.split(deltags):
# find tags, ignoring case
remove = []
for tx in currentTags:
if tag.lower() == tx.lower():
remove.append(tx)
# remove them
for r in remove:
currentTags.remove(r)
return self.join(currentTags)
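# e.g. (worked sketch): addToStr(u"b a", u" a c ") gives u" a b c "
# (duplicates dropped, canonified and sorted), while remFromStr(u"A",
# u" a b ") gives u" b " (case-insensitive removal).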
# List-based utilities
##########################################################################
def canonify(self, tagList):
"Strip duplicates, adjust case to match existing tags, and sort."
strippedTags = []
for t in tagList:
s = re.sub("[\"']", "", t)
for existingTag in self.tags:
if s.lower() == existingTag.lower():
s = existingTag
strippedTags.append(s)
return sorted(set(strippedTags))
def inList(self, tag, tags):
"True if TAG is in TAGS. Ignore case."
return tag.lower() in [t.lower() for t in tags]
# Sync handling
##########################################################################
def beforeUpload(self):
for k in self.tags.keys():
self.tags[k] = 0
self.save()
| AnkiServer | /AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/tags.py | tags.py |
import os, time, re, datetime, shutil
from anki.utils import intTime, tmpfile, ids2str, splitFields, base91, json
from anki.db import DB
from anki.collection import _Collection
from anki.consts import *
from anki.storage import _addSchema, _getColVars, _addColVars, \
_updateIndices
#
# Upgrading is the first step in migrating to 2.0.
# Caller should have called check() on path before calling upgrade().
#
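# Usage sketch (illustrative):
#
#   up = Upgrader()
#   res = up.check(u"/path/to/collection.anki")
#   if res != "invalid":
#       col = up.upgrade()   # returns the upgraded 2.0 collection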
class Upgrader(object):
def __init__(self):
self.tmppath = None
# Integrity checking & initial setup
######################################################################
def check(self, path):
"Returns 'ok', 'invalid', or log of fixes applied."
# copy into a temp file before we open
self.tmppath = tmpfile(suffix=".anki2")
shutil.copy(path, self.tmppath)
# run initial check
with DB(self.tmppath) as db:
res = self._check(db)
# needs fixing?
if res not in ("ok", "invalid"):
res = self._fix(self.tmppath)
# don't allow .upgrade() if invalid
if res == "invalid":
os.unlink(self.tmppath)
self.tmppath = None
return res
def _check(self, db):
# corrupt?
try:
if db.scalar("pragma integrity_check") != "ok":
return "invalid"
except:
return "invalid"
# old version?
if db.scalar("select version from decks") < 65:
return
# ensure we have indices for checks below
db.executescript("""
create index if not exists ix_cards_factId on cards (factId);
create index if not exists ix_fields_factId on fields (factId);
analyze;""")
# fields missing a field model?
if db.list("""
select id from fields where fieldModelId not in (
select distinct id from fieldModels)"""):
return
# facts missing a field?
if db.list("""
select distinct facts.id from facts, fieldModels where
facts.modelId = fieldModels.modelId and fieldModels.id not in
(select fieldModelId from fields where factId = facts.id)"""):
return
# cards missing a fact?
if db.list("""
select id from cards where factId not in (select id from facts)"""):
return
# cards missing a card model?
if db.list("""
select id from cards where cardModelId not in
(select id from cardModels)"""):
return
# cards with a card model from the wrong model?
if db.list("""
select id from cards where cardModelId not in (select cm.id from
cardModels cm, facts f where cm.modelId = f.modelId and
f.id = cards.factId)"""):
return
# facts missing a card?
if db.list("""
select facts.id from facts
where facts.id not in (select distinct factId from cards)"""):
return
# dangling fields?
if db.list("""
select id from fields where factId not in (select id from facts)"""):
return
# incorrect types
if db.list("""
select id from cards where relativeDelay != (case
when successive then 1 when reps then 0 else 2 end)"""):
return
if db.list("""
select id from cards where type != (case
when type >= 0 then relativeDelay else relativeDelay - 3 end)"""):
return
return "ok"
def _fix(self, path):
from oldanki import DeckStorage
try:
deck = DeckStorage.Deck(path, backup=False)
except:
# if we can't open the file, it's invalid
return "invalid"
# run a db check
res = deck.fixIntegrity()
if "Database file is damaged" in res:
# we can't recover from a corrupt db
return "invalid"
# other errors are non-fatal
deck.close()
return res
# Upgrading
######################################################################
def upgrade(self):
assert self.tmppath
self.db = DB(self.tmppath)
self._upgradeSchema()
self.col = _Collection(self.db)
self._upgradeRest()
self.tmppath = None
return self.col
# Schema upgrade
######################################################################
def _upgradeSchema(self):
"Alter tables prior to ORM initialization."
db = self.db
# speed up the upgrade
db.execute("pragma temp_store = memory")
db.execute("pragma cache_size = 10000")
db.execute("pragma synchronous = off")
# these weren't always correctly set
db.execute("pragma page_size = 4096")
db.execute("pragma legacy_file_format = 0")
for mid in db.list("select id from models"):
# ensure the ordinals are correct for each cardModel
for c, cmid in enumerate(db.list(
"select id from cardModels where modelId = ? order by ordinal",
mid)):
db.execute("update cardModels set ordinal = ? where id = ?",
c, cmid)
# and fieldModel
for c, fmid in enumerate(db.list(
"select id from fieldModels where modelId = ? order by ordinal",
mid)):
db.execute("update fieldModels set ordinal = ? where id = ?",
c, fmid)
# then fix ordinals numbers on cards & fields
db.execute("""update cards set ordinal = (select ordinal from
cardModels where cardModels.id = cardModelId)""")
db.execute("""update fields set ordinal = (select ordinal from
fieldModels where id = fieldModelId)""")
# notes
###########
# tags should have a leading and trailing space if not empty, and not
# use commas
db.execute("""
update facts set tags = (case
when trim(tags) == "" then ""
else " " || replace(replace(trim(tags), ",", " "), " ", " ") || " "
end)
""")
# pull facts into memory, so we can merge them with fields efficiently
facts = db.all("""
select id, id, modelId, cast(created*1000 as int), cast(modified as int),
0, tags from facts order by created""")
# build field hash
fields = {}
for (fid, ord, val) in db.execute(
"select factId, ordinal, value from fields order by factId, ordinal"):
if fid not in fields:
fields[fid] = []
val = self._mungeField(val)
fields[fid].append((ord, val))
# build insert data and transform ids, and minimize qt's
# bold/italics/underline cruft.
map = {}
data = []
factidmap = {}
from anki.utils import minimizeHTML
highest = 0
for c, row in enumerate(facts):
oldid = row[0]
row = list(row)
if row[3] <= highest:
highest = max(highest, row[3]) + 1
row[3] = highest
else:
highest = row[3]
factidmap[row[0]] = row[3]
row[0] = row[3]
del row[3]
map[oldid] = row[0]
# convert old 64bit id into a string, discarding sign bit
row[1] = base91(abs(row[1]))
row.append(minimizeHTML("\x1f".join([x[1] for x in sorted(fields[oldid])])))
data.append(row)
# and put the facts into the new table
db.execute("drop table facts")
_addSchema(db, False)
db.executemany("insert into notes values (?,?,?,?,?,?,?,'','',0,'')", data)
db.execute("drop table fields")
# cards
###########
# we need to pull this into memory, to rewrite the creation time if
# it's not unique and update the fact id
rows = []
cardidmap = {}
highest = 0
for row in db.execute("""
select id, cast(created*1000 as int), factId, ordinal,
cast(modified as int), 0,
(case relativeDelay
when 0 then 1
when 1 then 2
when 2 then 0 end),
(case type
when 0 then 1
when 1 then 2
when 2 then 0
else type end),
cast(due as int), cast(interval as int),
cast(factor*1000 as int), reps, noCount from cards
order by created"""):
# find an unused time
row = list(row)
if row[1] <= highest:
highest = max(highest, row[1]) + 1
row[1] = highest
else:
highest = row[1]
# rewrite fact id
row[2] = factidmap[row[2]]
# note id change and save all but old id
cardidmap[row[0]] = row[1]
rows.append(row[1:])
# drop old table and rewrite
db.execute("drop table cards")
_addSchema(db, False)
db.executemany("""
insert into cards values (?,?,1,?,?,?,?,?,?,?,?,?,?,0,0,0,0,"")""",
rows)
# reviewHistory -> revlog
###########
# fetch the data so we can rewrite ids quickly
r = []
for row in db.execute("""
select
cast(time*1000 as int), cardId, 0, ease,
cast(nextInterval as int), cast(lastInterval as int),
cast(nextFactor*1000 as int), cast(min(thinkingTime, 60)*1000 as int),
yesCount from reviewHistory"""):
row = list(row)
# new card ids
try:
row[1] = cardidmap[row[1]]
except:
# id doesn't exist
continue
# no ease 0 anymore
row[3] = row[3] or 1
# determine type, overwriting yesCount
newInt = row[4]
oldInt = row[5]
yesCnt = row[8]
# yesCnt included the current answer
if row[3] > 1:
yesCnt -= 1
if oldInt < 1:
# new or failed
if yesCnt:
# type=relrn
row[8] = 2
else:
# type=lrn
row[8] = 0
else:
# type=rev
row[8] = 1
r.append(row)
db.executemany(
"insert or ignore into revlog values (?,?,?,?,?,?,?,?,?)", r)
db.execute("drop table reviewHistory")
# deck
###########
self._migrateDeckTbl()
# tags
###########
tags = {}
for t in db.list("select tag from tags"):
tags[t] = intTime()
db.execute("update col set tags = ?", json.dumps(tags))
db.execute("drop table tags")
db.execute("drop table cardTags")
# the rest
###########
db.execute("drop table media")
db.execute("drop table sources")
self._migrateModels()
_updateIndices(db)
def _migrateDeckTbl(self):
db = self.db
db.execute("delete from col")
db.execute("""
insert or replace into col select id, cast(created as int), :t,
:t, 99, 0, 0, cast(lastSync as int),
"", "", "", "", "" from decks""", t=intTime())
# prepare a deck to store the old deck options
g, gc, conf = _getColVars(db)
# delete old selective study settings, which we can't auto-upgrade easily
keys = ("newActive", "newInactive", "revActive", "revInactive")
for k in keys:
db.execute("delete from deckVars where key=:k", k=k)
# copy other settings, ignoring deck order as there's a new default
gc['new']['perDay'] = db.scalar("select newCardsPerDay from decks")
gc['new']['order'] = min(1, db.scalar("select newCardOrder from decks"))
# these are collection level, and can't be imported on a per-deck basis
# conf['newSpread'] = db.scalar("select newCardSpacing from decks")
# conf['timeLim'] = db.scalar("select sessionTimeLimit from decks")
# add any deck vars and save
for (k, v) in db.execute("select * from deckVars").fetchall():
if k in ("hexCache", "cssCache"):
# ignore
pass
elif k == "leechFails":
gc['lapse']['leechFails'] = int(v)
else:
conf[k] = v
# don't use a learning mode for upgrading users
#gc['new']['delays'] = [10]
_addColVars(db, g, gc, conf)
# clean up
db.execute("drop table decks")
db.execute("drop table deckVars")
def _migrateModels(self):
import anki.models
db = self.db
times = {}
mods = {}
for row in db.all(
"select id, name from models"):
# use only first 31 bits if not old anki id
t = abs(row[0])
if t > 4294967296:
t >>= 32
assert t > 0
m = anki.models.defaultModel.copy()
m['id'] = t
m['name'] = row[1]
m['mod'] = intTime()
m['tags'] = []
m['flds'] = self._fieldsForModel(row[0])
m['tmpls'] = self._templatesForModel(row[0], m['flds'])
mods[m['id']] = m
db.execute("update notes set mid = ? where mid = ?", t, row[0])
# save and clean up
db.execute("update col set models = ?", json.dumps(mods))
db.execute("drop table fieldModels")
db.execute("drop table cardModels")
db.execute("drop table models")
def _fieldsForModel(self, mid):
import anki.models
db = self.db
dconf = anki.models.defaultField
flds = []
# note: qsize & qcol are used in upgrade then discarded
for c, row in enumerate(db.all("""
select name, features, quizFontFamily, quizFontSize, quizFontColour,
editFontSize from fieldModels where modelId = ?
order by ordinal""", mid)):
conf = dconf.copy()
(conf['name'],
conf['rtl'],
conf['font'],
conf['qsize'],
conf['qcol'],
conf['size']) = row
conf['ord'] = c
# ensure data is good
conf['rtl'] = not not conf['rtl']
conf['font'] = conf['font'] or "Arial"
conf['size'] = 12
# will be removed later in upgrade
conf['qcol'] = conf['qcol'] or "#000"
conf['qsize'] = conf['qsize'] or 20
flds.append(conf)
return flds
def _templatesForModel(self, mid, flds):
import anki.models
db = self.db
dconf = anki.models.defaultTemplate
tmpls = []
for c, row in enumerate(db.all("""
select name, active, qformat, aformat, questionInAnswer,
questionAlign, lastFontColour, typeAnswer from cardModels
where modelId = ?
order by ordinal""", mid)):
conf = dconf.copy()
(conf['name'],
conf['actv'],
conf['qfmt'],
conf['afmt'],
# the following are used in upgrade then discarded
hideq,
conf['align'],
conf['bg'],
typeAns) = row
conf['ord'] = c
for type in ("qfmt", "afmt"):
# ensure the new style field format
conf[type] = re.sub("%\((.+?)\)s", "{{\\1}}", conf[type])
# some special names have changed
conf[type] = re.sub(
"(?i){{tags}}", "{{Tags}}", conf[type])
conf[type] = re.sub(
"(?i){{cardModel}}", "{{Card}}", conf[type])
conf[type] = re.sub(
"(?i){{modelTags}}", "{{Type}}", conf[type])
# type answer is now embedded in the format
if typeAns:
if type == "qfmt" or hideq:
conf[type] += '<br>{{type:%s}}' % typeAns
# q fields now in a
if not hideq:
conf['afmt'] = (
"{{FrontSide}}\n\n<hr id=answer>\n\n" + conf['afmt'])
tmpls.append(conf)
return tmpls
# Field munging
######################################################################
def _mungeField(self, val):
# we no longer wrap fields in white-space: pre-wrap, so we need to
# convert previous whitespace into non-breaking spaces
def repl(match):
return match.group(1).replace(" ", u"\u00a0")
return re.sub("( +)", repl, val)
# Template upgrading
######################################################################
# - {{field}} no longer inserts an implicit span, so we make the span
# explicit on upgrade.
# - likewise with alignment and background color
def _upgradeTemplates(self):
d = self.col
for m in d.models.all():
# cache field styles
styles = {}
for f in m['flds']:
attrs = []
if f['font'].lower() != 'arial':
attrs.append("font-family: %s" % f['font'])
if f['qsize'] != 20:
attrs.append("font-size: %spx" % f['qsize'])
if f['qcol'] not in ("black", "#000"):
attrs.append("color: %s" % f['qcol'])
if f['rtl']:
attrs.append("direction: rtl; unicode-bidi: embed")
if attrs:
styles[f['name']] = '<span style="%s">{{%s}}</span>' % (
"; ".join(attrs), f['name'])
# obsolete
del f['qcol']
del f['qsize']
# then for each template
for t in m['tmpls']:
def repl(match):
field = match.group(2)
if field in styles:
return match.group(1) + styles[field]
# special or non-existent field; leave alone
return match.group(0)
for k in 'qfmt', 'afmt':
# replace old field references
t[k] = re.sub("(^|[^{]){{([^{}]+)?}}", repl, t[k])
# then strip extra {}s from other fields
t[k] = t[k].replace("{{{", "{{").replace("}}}", "}}")
# remove superfluous formatting from 1.0 -> 1.2 upgrade
t[k] = re.sub("font-size: ?20px;?", "", t[k])
t[k] = re.sub("(?i)font-family: ?arial;?", "", t[k])
t[k] = re.sub("color: ?#000(000)?;?", "", t[k])
t[k] = re.sub("white-space: ?pre-wrap;?", "", t[k])
# new furigana handling
if "japanese" in m['name'].lower():
if k == 'qfmt':
t[k] = t[k].replace(
"{{Reading}}", "{{kana:Reading}}")
else:
t[k] = t[k].replace(
"{{Reading}}", "{{furigana:Reading}}")
# adjust css
css = ""
if t['bg'] != "white" and t['bg'].lower() != "#ffffff":
css = "background-color: %s;" % t['bg']
if t['align']:
css += "text-align: %s" % ("left", "right")[t['align']-1]
if css:
css = '\n.card%d { %s }' % (t['ord']+1, css)
m['css'] += css
# remove obsolete
del t['bg']
del t['align']
# save model
d.models.save(m)
# Media references
######################################################################
# In 2.0 we drop support for media and latex references in the template,
# since they require generating card templates to see what media a note
# uses, and are confusing for shared deck users. To ease the upgrade
# process, we automatically convert the references to new fields.
def _rewriteMediaRefs(self):
col = self.col
def rewriteRef(key):
all = match.group(0)
fname = match.group("fname")
if all in state['mflds']:
# we've converted this field before
new = state['mflds'][all]
else:
# get field name and any prefix/suffix
m2 = re.match(
"([^{]*)\{\{\{?(?:text:)?([^}]+)\}\}\}?(.*)",
fname)
# not a field reference?
if not m2:
return
pre, ofld, suf = m2.groups()
# get index of field name
try:
idx = col.models.fieldMap(m)[ofld][0]
except:
# invalid field or tag reference; don't rewrite
return
# find a free field name
while 1:
state['fields'] += 1
fld = "Media %d" % state['fields']
if fld not in col.models.fieldMap(m).keys():
break
# add the new field
f = col.models.newField(fld)
f['qsize'] = 20
f['qcol'] = '#000'
col.models.addField(m, f)
# loop through notes and write reference into new field
data = []
for id, flds in self.col.db.execute(
"select id, flds from notes where id in "+
ids2str(col.models.nids(m))):
sflds = splitFields(flds)
ref = all.replace(fname, pre+sflds[idx]+suf)
data.append((flds+ref, id))
# update notes
col.db.executemany("update notes set flds=? where id=?",
data)
# note field for future
state['mflds'][fname] = fld
new = fld
# rewrite reference in template
t[key] = t[key].replace(all, "{{{%s}}}" % new)
regexps = col.media.regexps + [
r"(\[latex\](?P<fname>.+?)\[/latex\])",
r"(\[\$\](?P<fname>.+?)\[/\$\])",
r"(\[\$\$\](?P<fname>.+?)\[/\$\$\])"]
# process each model
for m in col.models.all():
state = dict(mflds={}, fields=0)
for t in m['tmpls']:
for r in regexps:
for match in re.finditer(r, t['qfmt']):
rewriteRef('qfmt')
for match in re.finditer(r, t['afmt']):
rewriteRef('afmt')
if state['fields']:
col.models.save(m)
# Inactive templates
######################################################################
# Templates can't be declared as inactive anymore. Remove any that are
# marked inactive and have no dependent cards.
def _removeInactive(self):
d = self.col
for m in d.models.all():
remove = []
for t in m['tmpls']:
if not t['actv']:
if not d.db.scalar("""
select 1 from cards where nid in (select id from notes where mid = ?)
and ord = ? limit 1""", m['id'], t['ord']):
remove.append(t)
del t['actv']
for r in remove:
try:
d.models.remTemplate(m, r)
except AssertionError:
# if the model was unused this could result in all
# templates being removed; ignore error
pass
d.models.save(m)
# Conditional templates
######################################################################
# For models that don't use a given template in all cards, we'll need to
# add a new field to notes to indicate if the card should be generated or not
def _addFlagFields(self):
for m in self.col.models.all():
nids = self.col.models.nids(m)
changed = False
for tmpl in m['tmpls']:
if self._addFlagFieldsForTemplate(m, nids, tmpl):
changed = True
if changed:
# save model
self.col.models.save(m, templates=True)
def _addFlagFieldsForTemplate(self, m, nids, tmpl):
cids = self.col.db.list(
"select id from cards where nid in %s and ord = ?" %
ids2str(nids), tmpl['ord'])
if len(cids) == len(nids):
# not selectively used
return
# add a flag field
name = tmpl['name']
have = [f['name'] for f in m['flds']]
while name in have:
name += "_"
f = self.col.models.newField(name)
self.col.models.addField(m, f)
# find the notes that have that card
haveNids = self.col.db.list(
"select nid from cards where id in "+ids2str(cids))
# add "y" to the appended field for those notes
self.col.db.execute(
"update notes set flds = flds || 'y' where id in "+ids2str(
haveNids))
# wrap the template in a conditional
tmpl['qfmt'] = "{{#%s}}\n%s\n{{/%s}}" % (
f['name'], tmpl['qfmt'], f['name'])
return True
# Post-schema upgrade
######################################################################
def _upgradeRest(self):
"Handle the rest of the upgrade to 2.0."
col = self.col
# make sure we have a current model id
col.models.setCurrent(col.models.models.values()[0])
# remove unused templates that were marked inactive
self._removeInactive()
# rewrite media references in card template
self._rewriteMediaRefs()
# template handling has changed
self._upgradeTemplates()
# add fields for selectively used templates
self._addFlagFields()
# fix creation time
col.sched._updateCutoff()
d = datetime.datetime.today()
d -= datetime.timedelta(hours=4)
d = datetime.datetime(d.year, d.month, d.day)
d += datetime.timedelta(hours=4)
d -= datetime.timedelta(days=1+int((time.time()-col.crt)/86400))
col.crt = int(time.mktime(d.timetuple()))
col.sched._updateCutoff()
# update uniq cache
col.updateFieldCache(col.db.list("select id from notes"))
# remove old views
for v in ("failedCards", "revCardsOld", "revCardsNew",
"revCardsDue", "revCardsRandom", "acqCardsRandom",
"acqCardsOld", "acqCardsNew"):
col.db.execute("drop view if exists %s" % v)
# remove stats, as it's all in the revlog now
col.db.execute("drop table if exists stats")
# suspended cards don't use ranges anymore
col.db.execute("update cards set queue=-1 where queue between -3 and -1")
col.db.execute("update cards set queue=-2 where queue between 3 and 5")
col.db.execute("update cards set queue=type where queue between 6 and 8")
# remove old deleted tables
for t in ("cards", "notes", "models", "media"):
col.db.execute("drop table if exists %sDeleted" % t)
# and failed cards
left = len(col.decks.confForDid(1)['lapse']['delays'])*1001
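        # (hedged editor's note: in the 2.0 scheduler 'left' packs two
        # counts as a*1000+b -- reps left today and reps left until
        # graduation -- so len(delays)*1001 sets both to len(delays))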
col.db.execute("""
update cards set left=?,type=1,queue=1,ivl=1 where type=1 and ivl <= 1
and queue>=0""", left)
col.db.execute("""
update cards set odue=?,left=?,type=2 where type=1 and ivl > 1 and queue>=0""",
col.sched.today+1, left)
# and due cards
col.db.execute("""
update cards set due = cast(
(case when due < :stamp then 0 else 1 end) +
((due-:stamp)/86400) as int)+:today where type = 2
""", stamp=col.sched.dayCutoff, today=col.sched.today)
# lapses were counted differently in 1.0, so we should have a higher
# default lapse threshold
for d in col.decks.allConf():
d['lapse']['leechFails'] = 16
col.decks.save(d)
# possibly re-randomize
conf = col.decks.allConf()[0]
if not conf['new']['order']:
col.sched.randomizeCards(1)
else:
col.sched.orderCards(1)
# optimize and finish
col.db.commit()
col.db.execute("vacuum")
col.db.execute("analyze")
col.db.execute("update col set ver = ?", SCHEMA_VERSION)
col.save()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/upgrade.py
|
upgrade.py
|
import re, sys, threading, time, subprocess, os, atexit
import random
from anki.hooks import addHook
from anki.utils import tmpdir, isWin, isMac
from anki.lang import _
# Shared utils
##########################################################################
_soundReg = "\[sound:(.*?)\]"
def playFromText(text):
for match in re.findall(_soundReg, text):
play(match)
def stripSounds(text):
return re.sub(_soundReg, "", text)
def hasSound(text):
return re.search(_soundReg, text) is not None
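# Illustrative usage of the helpers above (not part of the original module):
#   stripSounds(u"hi [sound:hello.mp3]") -> u"hi "
#   hasSound(u"hi [sound:hello.mp3]")    -> True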
##########################################################################
processingSrc = u"rec.wav"
processingDst = u"rec.mp3"
processingChain = []
recFiles = []
processingChain = [
["lame", "rec.wav", processingDst, "--noreplaygain", "--quiet"],
]
# don't show box on windows
if isWin:
si = subprocess.STARTUPINFO()
try:
si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
except:
# python2.7+
si.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW
else:
si = None
if isMac:
# make sure lame, which is installed in /usr/local/bin, is in the path
os.environ['PATH'] += ":" + "/usr/local/bin"
dir = os.path.dirname(os.path.abspath(__file__))
dir = os.path.abspath(dir + "/../../../..")
os.environ['PATH'] += ":" + dir + "/audio"
def retryWait(proc):
# osx throws interrupted system call errors frequently
while 1:
try:
return proc.wait()
except OSError:
continue
# Mplayer settings
##########################################################################
if isWin:
mplayerCmd = ["mplayer.exe", "-ao", "win32"]
dir = os.path.dirname(os.path.abspath(sys.argv[0]))
os.environ['PATH'] += ";" + dir
os.environ['PATH'] += ";" + dir + "\\..\\win\\top" # for testing
else:
mplayerCmd = ["mplayer"]
mplayerCmd += ["-really-quiet", "-noautosub"]
# Mplayer in slave mode
##########################################################################
mplayerQueue = []
mplayerManager = None
mplayerReader = None
mplayerEvt = threading.Event()
mplayerClear = False
class MplayerMonitor(threading.Thread):
def run(self):
global mplayerClear
self.mplayer = None
self.deadPlayers = []
while 1:
mplayerEvt.wait()
mplayerEvt.clear()
# clearing queue?
if mplayerClear and self.mplayer:
try:
self.mplayer.stdin.write("stop\n")
except:
# mplayer quit by user (likely video)
self.deadPlayers.append(self.mplayer)
self.mplayer = None
# loop through files to play
while mplayerQueue:
# ensure started
if not self.mplayer:
self.startProcess()
# pop a file
try:
item = mplayerQueue.pop(0)
except IndexError:
# queue was cleared by main thread
continue
if mplayerClear:
mplayerClear = False
extra = ""
else:
extra = " 1"
cmd = 'loadfile "%s"%s\n' % (item, extra)
try:
self.mplayer.stdin.write(cmd)
except:
# mplayer has quit and needs restarting
self.deadPlayers.append(self.mplayer)
self.mplayer = None
self.startProcess()
self.mplayer.stdin.write(cmd)
# if we feed mplayer too fast it loses files
time.sleep(1)
# wait() on finished processes. we don't want to block on the
# wait, so we keep trying each time we're reactivated
def clean(pl):
if pl.poll() is not None:
pl.wait()
return False
else:
return True
self.deadPlayers = [pl for pl in self.deadPlayers if clean(pl)]
def kill(self):
if not self.mplayer:
return
try:
self.mplayer.stdin.write("quit\n")
self.deadPlayers.append(self.mplayer)
except:
pass
self.mplayer = None
def startProcess(self):
try:
cmd = mplayerCmd + ["-slave", "-idle"]
devnull = file(os.devnull, "w")
self.mplayer = subprocess.Popen(
cmd, startupinfo=si, stdin=subprocess.PIPE,
stdout=devnull, stderr=devnull)
except OSError:
mplayerEvt.clear()
raise Exception("Did you install mplayer?")
def queueMplayer(path):
ensureMplayerThreads()
if isWin and os.path.exists(path):
# mplayer on windows doesn't like the encoding, so we create a
# temporary file instead. oddly, foreign characters in the dirname
# don't seem to matter.
dir = tmpdir()
name = os.path.join(dir, "audio%s%s" % (
random.randrange(0, 1000000), os.path.splitext(path)[1]))
f = open(name, "wb")
f.write(open(path, "rb").read())
f.close()
# it wants unix paths, too!
path = name.replace("\\", "/")
path = path.encode(sys.getfilesystemencoding())
else:
path = path.encode("utf-8")
mplayerQueue.append(path)
mplayerEvt.set()
def clearMplayerQueue():
global mplayerClear, mplayerQueue
mplayerQueue = []
mplayerClear = True
mplayerEvt.set()
def ensureMplayerThreads():
global mplayerManager
if not mplayerManager:
mplayerManager = MplayerMonitor()
mplayerManager.daemon = True
mplayerManager.start()
# ensure the tmpdir() exit handler is registered first so it runs
# after the mplayer exit
tmpdir()
# clean up mplayer on exit
atexit.register(stopMplayer)
def stopMplayer(*args):
if not mplayerManager:
return
mplayerManager.kill()
addHook("unloadProfile", stopMplayer)
# PyAudio recording
##########################################################################
try:
import pyaudio
import wave
PYAU_FORMAT = pyaudio.paInt16
PYAU_CHANNELS = 1
PYAU_INPUT_INDEX = None
except:
pass
class _Recorder(object):
def postprocess(self, encode=True):
self.encode = encode
for c in processingChain:
#print c
if not self.encode and c[0] == 'lame':
continue
try:
ret = retryWait(subprocess.Popen(c, startupinfo=si))
except:
ret = True
if ret:
raise Exception(_(
"Error running %s") %
u" ".join(c))
class PyAudioThreadedRecorder(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.finish = False
def run(self):
chunk = 1024
try:
p = pyaudio.PyAudio()
except NameError:
raise Exception(
"Pyaudio not installed (recording not supported on OSX10.3)")
rate = int(p.get_default_input_device_info()['defaultSampleRate'])
stream = p.open(format=PYAU_FORMAT,
channels=PYAU_CHANNELS,
rate=rate,
input=True,
input_device_index=PYAU_INPUT_INDEX,
frames_per_buffer=chunk)
all = []
while not self.finish:
try:
data = stream.read(chunk)
except IOError, e:
if e[1] == pyaudio.paInputOverflowed:
data = None
else:
raise
if data:
all.append(data)
stream.close()
p.terminate()
data = ''.join(all)
wf = wave.open(processingSrc, 'wb')
wf.setnchannels(PYAU_CHANNELS)
wf.setsampwidth(p.get_sample_size(PYAU_FORMAT))
wf.setframerate(rate)
wf.writeframes(data)
wf.close()
class PyAudioRecorder(_Recorder):
def __init__(self):
for t in recFiles + [processingSrc, processingDst]:
try:
os.unlink(t)
except OSError:
pass
self.encode = False
def start(self):
self.thread = PyAudioThreadedRecorder()
self.thread.start()
def stop(self):
self.thread.finish = True
self.thread.join()
def file(self):
if self.encode:
tgt = u"rec%d.mp3" % time.time()
os.rename(processingDst, tgt)
return tgt
else:
return processingSrc
# Audio interface
##########################################################################
_player = queueMplayer
_queueEraser = clearMplayerQueue
def play(path):
_player(path)
def clearAudioQueue():
_queueEraser()
Recorder = PyAudioRecorder
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/sound.py
|
sound.py
|
import pprint
import re
import time
import os
import random
import stat
import datetime
import copy
import traceback
from anki.lang import _, ngettext
from anki.utils import ids2str, fieldChecksum, stripHTML, \
intTime, splitFields, joinFields, maxID, json
from anki.hooks import runFilter, runHook
from anki.sched import Scheduler
from anki.models import ModelManager
from anki.media import MediaManager
from anki.decks import DeckManager
from anki.tags import TagManager
from anki.consts import *
from anki.errors import AnkiError
from anki.sound import stripSounds
import anki.latex # sets up hook
import anki.cards
import anki.notes
import anki.template
import anki.find
defaultConf = {
# review options
'activeDecks': [1],
'curDeck': 1,
'newSpread': NEW_CARDS_DISTRIBUTE,
'collapseTime': 1200,
'timeLim': 0,
'estTimes': True,
'dueCounts': True,
# other config
'curModel': None,
'nextPos': 1,
'sortType': "noteFld",
'sortBackwards': False,
'addToCur': True, # add new to currently selected deck?
}
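# (hedged editor's note: this dict seeds col.conf for new collections;
# 'collapseTime' is in seconds, and a 'timeLim' of 0 disables timeboxing,
# as checked in timeboxReached below)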
# this is initialized by storage.Collection
class _Collection(object):
def __init__(self, db, server=False, log=False):
self._debugLog = log
self.db = db
self.path = db._path
self._openLog()
self.log(self.path, anki.version)
self.server = server
self._lastSave = time.time()
self.clearUndo()
self.media = MediaManager(self, server)
self.models = ModelManager(self)
self.decks = DeckManager(self)
self.tags = TagManager(self)
self.load()
if not self.crt:
d = datetime.datetime.today()
d -= datetime.timedelta(hours=4)
d = datetime.datetime(d.year, d.month, d.day)
d += datetime.timedelta(hours=4)
self.crt = int(time.mktime(d.timetuple()))
self.sched = Scheduler(self)
if not self.conf.get("newBury", False):
self.conf['newBury'] = True
self.setMod()
def name(self):
n = os.path.splitext(os.path.basename(self.path))[0]
return n
# DB-related
##########################################################################
def load(self):
(self.crt,
self.mod,
self.scm,
self.dty, # no longer used
self._usn,
self.ls,
self.conf,
models,
decks,
dconf,
tags) = self.db.first("""
select crt, mod, scm, dty, usn, ls,
conf, models, decks, dconf, tags from col""")
self.conf = json.loads(self.conf)
self.models.load(models)
self.decks.load(decks, dconf)
self.tags.load(tags)
def setMod(self):
"""Mark DB modified.
DB operations and the deck/tag/model managers do this automatically, so this
is only necessary if you modify properties of this object or the conf dict."""
self.db.mod = True
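    # Illustrative usage (assumption, not upstream text): after mutating the
    # conf dict directly, callers flag the change themselves, e.g.
    #   col.conf['sortBackwards'] = True
    #   col.setMod()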
def flush(self, mod=None):
"Flush state to DB, updating mod time."
self.mod = intTime(1000) if mod is None else mod
self.db.execute(
"""update col set
crt=?, mod=?, scm=?, dty=?, usn=?, ls=?, conf=?""",
self.crt, self.mod, self.scm, self.dty,
self._usn, self.ls, json.dumps(self.conf))
def save(self, name=None, mod=None):
"Flush, commit DB, and take out another write lock."
# let the managers conditionally flush
self.models.flush()
self.decks.flush()
self.tags.flush()
# and flush deck + bump mod if db has been changed
if self.db.mod:
self.flush(mod=mod)
self.db.commit()
self.lock()
self.db.mod = False
self._markOp(name)
self._lastSave = time.time()
def autosave(self):
"Save if 5 minutes has passed since last save."
if time.time() - self._lastSave > 300:
self.save()
def lock(self):
# make sure we don't accidentally bump mod time
mod = self.db.mod
self.db.execute("update col set mod=mod")
self.db.mod = mod
def close(self, save=True):
"Disconnect from DB."
if self.db:
if save:
self.save()
else:
self.rollback()
if not self.server:
self.db.execute("pragma journal_mode = delete")
self.db.close()
self.db = None
self.media.close()
self._closeLog()
def reopen(self):
"Reconnect to DB (after changing threads, etc)."
import anki.db
if not self.db:
self.db = anki.db.DB(self.path)
self.media.connect()
self._openLog()
def rollback(self):
self.db.rollback()
self.load()
self.lock()
def modSchema(self, check):
"Mark schema modified. Call this first so user can abort if necessary."
if not self.schemaChanged():
if check and not runFilter("modSchema", True):
raise AnkiError("abortSchemaMod")
self.scm = intTime(1000)
self.setMod()
def schemaChanged(self):
"True if schema changed since last sync."
return self.scm > self.ls
def usn(self):
return self._usn if self.server else -1
def beforeUpload(self):
"Called before a full upload."
tbls = "notes", "cards", "revlog"
for t in tbls:
self.db.execute("update %s set usn=0 where usn=-1" % t)
# we can save space by removing the log of deletions
self.db.execute("delete from graves")
self._usn += 1
self.models.beforeUpload()
self.tags.beforeUpload()
self.decks.beforeUpload()
self.modSchema(check=False)
self.ls = self.scm
# ensure db is compacted before upload
self.db.execute("vacuum")
self.db.execute("analyze")
self.close()
# Object creation helpers
##########################################################################
def getCard(self, id):
return anki.cards.Card(self, id)
def getNote(self, id):
return anki.notes.Note(self, id=id)
# Utils
##########################################################################
def nextID(self, type, inc=True):
type = "next"+type.capitalize()
id = self.conf.get(type, 1)
if inc:
self.conf[type] = id+1
return id
def reset(self):
"Rebuild the queue and reload data after DB modified."
self.sched.reset()
# Deletion logging
##########################################################################
def _logRem(self, ids, type):
self.db.executemany("insert into graves values (%d, ?, %d)" % (
self.usn(), type), ([x] for x in ids))
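    # (hedged editor's note: each graves row is (usn, oid, type), letting
    # deletions be replayed during sync; the type constants such as
    # REM_CARD and REM_NOTE come from anki.consts)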
# Notes
##########################################################################
def noteCount(self):
return self.db.scalar("select count() from notes")
def newNote(self, forDeck=True):
"Return a new note with the current model."
return anki.notes.Note(self, self.models.current(forDeck))
def addNote(self, note):
"Add a note to the collection. Return number of new cards."
# check we have card models available, then save
cms = self.findTemplates(note)
if not cms:
return 0
note.flush()
# deck conf governs which of these are used
due = self.nextID("pos")
# add cards
ncards = 0
for template in cms:
self._newCard(note, template, due)
ncards += 1
return ncards
def remNotes(self, ids):
self.remCards(self.db.list("select id from cards where nid in "+
ids2str(ids)))
def _remNotes(self, ids):
"Bulk delete notes by ID. Don't call this directly."
if not ids:
return
strids = ids2str(ids)
# we need to log these independently of cards, as one side may have
# more card templates
runHook("remNotes", self, ids)
self._logRem(ids, REM_NOTE)
self.db.execute("delete from notes where id in %s" % strids)
# Card creation
##########################################################################
def findTemplates(self, note):
"Return (active), non-empty templates."
model = note.model()
avail = self.models.availOrds(model, joinFields(note.fields))
return self._tmplsFromOrds(model, avail)
def _tmplsFromOrds(self, model, avail):
ok = []
if model['type'] == MODEL_STD:
for t in model['tmpls']:
if t['ord'] in avail:
ok.append(t)
else:
# cloze - generate temporary templates from first
for ord in avail:
t = copy.copy(model['tmpls'][0])
t['ord'] = ord
ok.append(t)
return ok
def genCards(self, nids):
"Generate cards for non-empty templates, return ids to remove."
# build map of (nid,ord) so we don't create dupes
snids = ids2str(nids)
have = {}
dids = {}
for id, nid, ord, did in self.db.execute(
"select id, nid, ord, did from cards where nid in "+snids):
# existing cards
if nid not in have:
have[nid] = {}
have[nid][ord] = id
# and their dids
if nid in dids:
if dids[nid] and dids[nid] != did:
# cards are in two or more different decks; revert to
# model default
dids[nid] = None
else:
# first card or multiple cards in same deck
dids[nid] = did
# build cards for each note
data = []
ts = maxID(self.db)
now = intTime()
rem = []
usn = self.usn()
for nid, mid, flds in self.db.execute(
"select id, mid, flds from notes where id in "+snids):
model = self.models.get(mid)
avail = self.models.availOrds(model, flds)
did = dids.get(nid) or model['did']
# add any missing cards
for t in self._tmplsFromOrds(model, avail):
doHave = nid in have and t['ord'] in have[nid]
if not doHave:
# check deck is not a cram deck
did = t['did'] or did
if self.decks.isDyn(did):
did = 1
# if the deck doesn't exist, use default instead
did = self.decks.get(did)['id']
# we'd like to use the same due# as sibling cards, but we
# can't retrieve that quickly, so we give it a new id
# instead
data.append((ts, nid, did, t['ord'],
now, usn, self.nextID("pos")))
ts += 1
# note any cards that need removing
if nid in have:
for ord, id in have[nid].items():
if ord not in avail:
rem.append(id)
# bulk update
self.db.executemany("""
insert into cards values (?,?,?,?,?,?,0,0,?,0,0,0,0,0,0,0,0,"")""",
data)
return rem
# type 0 - when previewing in add dialog, only non-empty
# type 1 - when previewing edit, only existing
# type 2 - when previewing in models dialog, all templates
def previewCards(self, note, type=0):
if type == 0:
cms = self.findTemplates(note)
elif type == 1:
cms = [c.template() for c in note.cards()]
else:
cms = note.model()['tmpls']
if not cms:
return []
cards = []
for template in cms:
cards.append(self._newCard(note, template, 1, flush=False))
return cards
def _newCard(self, note, template, due, flush=True):
"Create a new card."
card = anki.cards.Card(self)
card.nid = note.id
card.ord = template['ord']
card.did = template['did'] or note.model()['did']
# if invalid did, use default instead
deck = self.decks.get(card.did)
if deck['dyn']:
# must not be a filtered deck
card.did = 1
else:
card.did = deck['id']
card.due = self._dueForDid(card.did, due)
if flush:
card.flush()
return card
def _dueForDid(self, did, due):
conf = self.decks.confForDid(did)
# in order due?
if conf['new']['order'] == NEW_CARDS_DUE:
return due
else:
# random mode; seed with note ts so all cards of this note get the
# same random number
r = random.Random()
r.seed(due)
return r.randrange(1, max(due, 1000))
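    # Illustrative consequence of the seeding above (editor's note): two
    # sibling cards created with the same 'due' position draw the same
    # pseudo-random number, e.g.
    #   r = random.Random(); r.seed(42); r.randrange(1, 1000)
    # returns the identical value each time it is recomputed.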
# Cards
##########################################################################
def isEmpty(self):
return not self.db.scalar("select 1 from cards limit 1")
def cardCount(self):
return self.db.scalar("select count() from cards")
def remCards(self, ids, notes=True):
"Bulk delete cards by ID."
if not ids:
return
sids = ids2str(ids)
nids = self.db.list("select nid from cards where id in "+sids)
# remove cards
self._logRem(ids, REM_CARD)
self.db.execute("delete from cards where id in "+sids)
# then notes
if not notes:
return
nids = self.db.list("""
select id from notes where id in %s and id not in (select nid from cards)""" %
ids2str(nids))
self._remNotes(nids)
def emptyCids(self):
rem = []
for m in self.models.all():
rem += self.genCards(self.models.nids(m))
return rem
def emptyCardReport(self, cids):
rep = ""
for ords, cnt, flds in self.db.all("""
select group_concat(ord+1), count(), flds from cards c, notes n
where c.nid = n.id and c.id in %s group by nid""" % ids2str(cids)):
rep += _("Empty card numbers: %(c)s\nFields: %(f)s\n\n") % dict(
c=ords, f=flds.replace("\x1f", " / "))
return rep
# Field checksums and sorting fields
##########################################################################
def _fieldData(self, snids):
return self.db.execute(
"select id, mid, flds from notes where id in "+snids)
def updateFieldCache(self, nids):
"Update field checksums and sort cache, after find&replace, etc."
snids = ids2str(nids)
r = []
for (nid, mid, flds) in self._fieldData(snids):
fields = splitFields(flds)
model = self.models.get(mid)
if not model:
# note points to invalid model
continue
r.append((stripHTML(fields[self.models.sortIdx(model)]),
fieldChecksum(fields[0]),
nid))
# apply, relying on calling code to bump usn+mod
self.db.executemany("update notes set sfld=?, csum=? where id=?", r)
# Q/A generation
##########################################################################
def renderQA(self, ids=None, type="card"):
# gather metadata
if type == "card":
where = "and c.id in " + ids2str(ids)
elif type == "note":
where = "and f.id in " + ids2str(ids)
elif type == "model":
where = "and m.id in " + ids2str(ids)
elif type == "all":
where = ""
else:
raise Exception()
return [self._renderQA(row)
for row in self._qaData(where)]
def _renderQA(self, data, qfmt=None, afmt=None):
"Returns hash of id, question, answer."
# data is [cid, nid, mid, did, ord, tags, flds]
# unpack fields and create dict
flist = splitFields(data[6])
fields = {}
model = self.models.get(data[2])
for (name, (idx, conf)) in self.models.fieldMap(model).items():
fields[name] = flist[idx]
fields['Tags'] = data[5].strip()
fields['Type'] = model['name']
fields['Deck'] = self.decks.name(data[3])
fields['Subdeck'] = fields['Deck'].split('::')[-1]
if model['type'] == MODEL_STD:
template = model['tmpls'][data[4]]
else:
template = model['tmpls'][0]
fields['Card'] = template['name']
fields['c%d' % (data[4]+1)] = "1"
# render q & a
d = dict(id=data[0])
qfmt = qfmt or template['qfmt']
afmt = afmt or template['afmt']
for (type, format) in (("q", qfmt), ("a", afmt)):
if type == "q":
format = re.sub("{{(?!type:)(.*?)cloze:", r"{{\1cq-%d:" % (data[4]+1), format)
format = format.replace("<%cloze:", "<%%cq:%d:" % (
data[4]+1))
else:
format = re.sub("{{(.*?)cloze:", r"{{\1ca-%d:" % (data[4]+1), format)
format = format.replace("<%cloze:", "<%%ca:%d:" % (
data[4]+1))
fields['FrontSide'] = stripSounds(d['q'])
fields = runFilter("mungeFields", fields, model, data, self)
html = anki.template.render(format, fields)
d[type] = runFilter(
"mungeQA", html, type, fields, model, data, self)
# empty cloze?
if type == 'q' and model['type'] == MODEL_CLOZE:
if not self.models._availClozeOrds(model, data[6], False):
d['q'] += ("<p>" + _(
"Please edit this note and add some cloze deletions. (%s)") % (
"<a href=%s#cloze>%s</a>" % (HELP_SITE, _("help"))))
return d
def _qaData(self, where=""):
"Return [cid, nid, mid, did, ord, tags, flds] db query"
return self.db.execute("""
select c.id, f.id, f.mid, c.did, c.ord, f.tags, f.flds
from cards c, notes f
where c.nid == f.id
%s""" % where)
# Finding cards
##########################################################################
def findCards(self, query, order=False):
return anki.find.Finder(self).findCards(query, order)
def findNotes(self, query):
return anki.find.Finder(self).findNotes(query)
def findReplace(self, nids, src, dst, regex=None, field=None, fold=True):
return anki.find.findReplace(self, nids, src, dst, regex, field, fold)
def findDupes(self, fieldName, search=""):
return anki.find.findDupes(self, fieldName, search)
# Stats
##########################################################################
def cardStats(self, card):
from anki.stats import CardStats
return CardStats(self, card).report()
def stats(self):
from anki.stats import CollectionStats
return CollectionStats(self)
# Timeboxing
##########################################################################
def startTimebox(self):
self._startTime = time.time()
self._startReps = self.sched.reps
def timeboxReached(self):
"Return (elapsedTime, reps) if timebox reached, or False."
if not self.conf['timeLim']:
# timeboxing disabled
return False
elapsed = time.time() - self._startTime
if elapsed > self.conf['timeLim']:
return (self.conf['timeLim'], self.sched.reps - self._startReps)
# Undo
##########################################################################
def clearUndo(self):
# [type, undoName, data]
# type 1 = review; type 2 = checkpoint
self._undo = None
def undoName(self):
"Undo menu item name, or None if undo unavailable."
if not self._undo:
return None
return self._undo[1]
def undo(self):
if self._undo[0] == 1:
return self._undoReview()
else:
self._undoOp()
def markReview(self, card):
old = []
if self._undo:
if self._undo[0] == 1:
old = self._undo[2]
self.clearUndo()
wasLeech = card.note().hasTag("leech") or False
self._undo = [1, _("Review"), old + [copy.copy(card)], wasLeech]
def _undoReview(self):
data = self._undo[2]
wasLeech = self._undo[3]
c = data.pop()
if not data:
self.clearUndo()
# remove leech tag if it didn't have it before
if not wasLeech and c.note().hasTag("leech"):
c.note().delTag("leech")
c.note().flush()
# write old data
c.flush()
# and delete revlog entry
last = self.db.scalar(
"select id from revlog where cid = ? "
"order by id desc limit 1", c.id)
self.db.execute("delete from revlog where id = ?", last)
# restore any siblings
self.db.execute(
"update cards set queue=type,mod=?,usn=? where queue=-2 and nid=?",
intTime(), self.usn(), c.nid)
# and finally, update daily counts
n = 1 if c.queue == 3 else c.queue
type = ("new", "lrn", "rev")[n]
self.sched._updateStats(c, type, -1)
self.sched.reps -= 1
return c.id
def _markOp(self, name):
"Call via .save()"
if name:
self._undo = [2, name]
else:
# saving disables old checkpoint, but not review undo
if self._undo and self._undo[0] == 2:
self.clearUndo()
def _undoOp(self):
self.rollback()
self.clearUndo()
# DB maintenance
##########################################################################
def basicCheck(self):
"Basic integrity check for syncing. True if ok."
# cards without notes
if self.db.scalar("""
select 1 from cards where nid not in (select id from notes) limit 1"""):
return
# notes without cards or models
if self.db.scalar("""
select 1 from notes where id not in (select distinct nid from cards)
or mid not in %s limit 1""" % ids2str(self.models.ids())):
return
# invalid ords
for m in self.models.all():
# ignore clozes
if m['type'] != MODEL_STD:
continue
if self.db.scalar("""
select 1 from cards where ord not in %s and nid in (
select id from notes where mid = ?) limit 1""" %
ids2str([t['ord'] for t in m['tmpls']]),
m['id']):
return
return True
def fixIntegrity(self):
"Fix possible problems and rebuild caches."
problems = []
self.save()
oldSize = os.stat(self.path)[stat.ST_SIZE]
if self.db.scalar("pragma integrity_check") != "ok":
return (_("Collection is corrupt. Please see the manual."), False)
# note types with a missing model
ids = self.db.list("""
select id from notes where mid not in """ + ids2str(self.models.ids()))
if ids:
problems.append(
ngettext("Deleted %d note with missing note type.",
"Deleted %d notes with missing note type.", len(ids))
% len(ids))
self.remNotes(ids)
# for each model
for m in self.models.all():
for t in m['tmpls']:
if t['did'] == "None":
t['did'] = None
problems.append(_("Fixed AnkiDroid deck override bug."))
self.models.save(m)
if m['type'] == MODEL_STD:
# model with missing req specification
if 'req' not in m:
self.models._updateRequired(m)
problems.append(_("Fixed note type: %s") % m['name'])
# cards with invalid ordinal
ids = self.db.list("""
select id from cards where ord not in %s and nid in (
select id from notes where mid = ?)""" %
ids2str([t['ord'] for t in m['tmpls']]),
m['id'])
if ids:
problems.append(
ngettext("Deleted %d card with missing template.",
"Deleted %d cards with missing template.",
len(ids)) % len(ids))
self.remCards(ids)
# notes with invalid field count
ids = []
for id, flds in self.db.execute(
"select id, flds from notes where mid = ?", m['id']):
if (flds.count("\x1f") + 1) != len(m['flds']):
ids.append(id)
if ids:
problems.append(
ngettext("Deleted %d note with wrong field count.",
"Deleted %d notes with wrong field count.",
len(ids)) % len(ids))
self.remNotes(ids)
# delete any notes with missing cards
ids = self.db.list("""
select id from notes where id not in (select distinct nid from cards)""")
if ids:
cnt = len(ids)
problems.append(
ngettext("Deleted %d note with no cards.",
"Deleted %d notes with no cards.", cnt) % cnt)
self._remNotes(ids)
# cards with missing notes
ids = self.db.list("""
select id from cards where nid not in (select id from notes)""")
if ids:
cnt = len(ids)
problems.append(
ngettext("Deleted %d card with missing note.",
"Deleted %d cards with missing note.", cnt) % cnt)
self.remCards(ids)
# cards with odue set when it shouldn't be
ids = self.db.list("""
select id from cards where odue > 0 and (type=1 or queue=2) and not odid""")
if ids:
cnt = len(ids)
problems.append(
ngettext("Fixed %d card with invalid properties.",
"Fixed %d cards with invalid properties.", cnt) % cnt)
self.db.execute("update cards set odue=0 where id in "+
ids2str(ids))
# cards with odid set when not in a dyn deck
dids = [id for id in self.decks.allIds() if not self.decks.isDyn(id)]
ids = self.db.list("""
select id from cards where odid > 0 and did in %s""" % ids2str(dids))
if ids:
cnt = len(ids)
problems.append(
ngettext("Fixed %d card with invalid properties.",
"Fixed %d cards with invalid properties.", cnt) % cnt)
self.db.execute("update cards set odid=0, odue=0 where id in "+
ids2str(ids))
# tags
self.tags.registerNotes()
# field cache
for m in self.models.all():
self.updateFieldCache(self.models.nids(m))
# new cards can't have a due position > 32 bits
self.db.execute("""
update cards set due = 1000000, mod = ?, usn = ? where due > 1000000
and queue = 0""", intTime(), self.usn())
# new card position
self.conf['nextPos'] = self.db.scalar(
"select max(due)+1 from cards where type = 0") or 0
# reviews should have a reasonable due #
ids = self.db.list(
"select id from cards where queue = 2 and due > 10000")
if ids:
problems.append("Reviews had incorrect due date.")
self.db.execute(
"update cards set due = 0, mod = ?, usn = ? where id in %s"
% ids2str(ids), intTime(), self.usn())
# and finally, optimize
self.optimize()
newSize = os.stat(self.path)[stat.ST_SIZE]
txt = _("Database rebuilt and optimized.")
ok = not problems
problems.append(txt)
# if any problems were found, force a full sync
if not ok:
self.modSchema(check=False)
self.save()
return ("\n".join(problems), ok)
def optimize(self):
self.db.execute("vacuum")
self.db.execute("analyze")
self.lock()
# Logging
##########################################################################
def log(self, *args, **kwargs):
if not self._debugLog:
return
def customRepr(x):
if isinstance(x, basestring):
return x
return pprint.pformat(x)
path, num, fn, y = traceback.extract_stack(
limit=2+kwargs.get("stack", 0))[0]
buf = u"[%s] %s:%s(): %s" % (intTime(), os.path.basename(path), fn,
", ".join([customRepr(x) for x in args]))
self._logHnd.write(buf.encode("utf8") + "\n")
if os.environ.get("ANKIDEV"):
print buf
def _openLog(self):
if not self._debugLog:
return
lpath = re.sub("\.anki2$", ".log", self.path)
if os.path.exists(lpath) and os.path.getsize(lpath) > 10*1024*1024:
lpath2 = lpath + ".old"
if os.path.exists(lpath2):
os.unlink(lpath2)
os.rename(lpath, lpath2)
self._logHnd = open(lpath, "ab")
def _closeLog(self):
self._logHnd = None
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/collection.py
|
collection.py
|
import pprint
import time
from anki.hooks import runHook
from anki.utils import intTime, timestampID, joinFields
from anki.consts import *
# Cards
##########################################################################
# Type: 0=new, 1=learning, 2=due
# Queue: same as above, and:
# -1=suspended, -2=user buried, -3=sched buried
# Due is used differently for different queues.
# - new queue: note id or random int
# - rev queue: integer day
# - lrn queue: integer timestamp
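# Illustrative readings of 'due' (editor's note, hedged): for a new card it
# may hold a note id or a random int used only for ordering; for a review
# card an integer day such as col.sched.today+1 means "due tomorrow"; for a
# learning card it is an epoch timestamp, e.g. intTime()+600 for "in ten
# minutes".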
class Card(object):
def __init__(self, col, id=None):
self.col = col
self.timerStarted = None
self._qa = None
self._note = None
if id:
self.id = id
self.load()
else:
# to flush, set nid, ord, and due
self.id = timestampID(col.db, "cards")
self.did = 1
self.crt = intTime()
self.type = 0
self.queue = 0
self.ivl = 0
self.factor = 0
self.reps = 0
self.lapses = 0
self.left = 0
self.odue = 0
self.odid = 0
self.flags = 0
self.data = ""
def load(self):
(self.id,
self.nid,
self.did,
self.ord,
self.mod,
self.usn,
self.type,
self.queue,
self.due,
self.ivl,
self.factor,
self.reps,
self.lapses,
self.left,
self.odue,
self.odid,
self.flags,
self.data) = self.col.db.first(
"select * from cards where id = ?", self.id)
self._qa = None
self._note = None
def flush(self):
self.mod = intTime()
self.usn = self.col.usn()
# bug check
if self.queue == 2 and self.odue and not self.col.decks.isDyn(self.did):
runHook("odueInvalid")
assert self.due < 4294967296
self.col.db.execute(
"""
insert or replace into cards values
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
self.id,
self.nid,
self.did,
self.ord,
self.mod,
self.usn,
self.type,
self.queue,
self.due,
self.ivl,
self.factor,
self.reps,
self.lapses,
self.left,
self.odue,
self.odid,
self.flags,
self.data)
self.col.log(self)
def flushSched(self):
self.mod = intTime()
self.usn = self.col.usn()
# bug checks
if self.queue == 2 and self.odue and not self.col.decks.isDyn(self.did):
runHook("odueInvalid")
assert self.due < 4294967296
self.col.db.execute(
"""update cards set
mod=?, usn=?, type=?, queue=?, due=?, ivl=?, factor=?, reps=?,
lapses=?, left=?, odue=?, odid=?, did=? where id = ?""",
self.mod, self.usn, self.type, self.queue, self.due, self.ivl,
self.factor, self.reps, self.lapses,
self.left, self.odue, self.odid, self.did, self.id)
self.col.log(self)
def q(self, reload=False, browser=False):
return self.css() + self._getQA(reload, browser)['q']
def a(self):
return self.css() + self._getQA()['a']
def css(self):
return "<style>%s</style>" % self.model()['css']
def _getQA(self, reload=False, browser=False):
if not self._qa or reload:
f = self.note(reload); m = self.model(); t = self.template()
data = [self.id, f.id, m['id'], self.odid or self.did, self.ord,
f.stringTags(), f.joinedFields()]
if browser:
args = (t.get('bqfmt'), t.get('bafmt'))
else:
args = tuple()
self._qa = self.col._renderQA(data, *args)
return self._qa
def note(self, reload=False):
if not self._note or reload:
self._note = self.col.getNote(self.nid)
return self._note
def model(self):
return self.col.models.get(self.note().mid)
def template(self):
m = self.model()
if m['type'] == MODEL_STD:
return self.model()['tmpls'][self.ord]
else:
return self.model()['tmpls'][0]
def startTimer(self):
self.timerStarted = time.time()
def timeLimit(self):
"Time limit for answering in milliseconds."
conf = self.col.decks.confForDid(self.odid or self.did)
return conf['maxTaken']*1000
def shouldShowTimer(self):
conf = self.col.decks.confForDid(self.odid or self.did)
return conf['timer']
def timeTaken(self):
"Time taken to answer card, in integer MS."
total = int((time.time() - self.timerStarted)*1000)
return min(total, self.timeLimit())
def isEmpty(self):
ords = self.col.models.availOrds(
self.model(), joinFields(self.note().fields))
if self.ord not in ords:
return True
def __repr__(self):
d = dict(self.__dict__)
# remove non-useful elements
del d['_note']
del d['_qa']
del d['col']
del d['timerStarted']
return pprint.pformat(d, width=300)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/cards.py
|
cards.py
|
import re, os, shutil, cgi
from anki.utils import checksum, call, namedtmp, tmpdir, isMac, stripHTML
from anki.hooks import addHook
from anki.lang import _
# if you modify these in an add-on, you must make sure to take tmp.tex as the
# input, and output tmp.png as the output file
latexCmds = [
["latex", "-interaction=nonstopmode", "tmp.tex"],
["dvipng", "-D", "200", "-T", "tight", "tmp.dvi", "-o", "tmp.png"]
# ["dvipng", "-D", "600", "-T", "tight", "-bg", "Transparent", "tmp.dvi", "-o", "tmp.png"]
]
build = True # if off, use existing media but don't create new
regexps = {
"standard": re.compile(r"\[latex\](.+?)\[/latex\]", re.DOTALL | re.IGNORECASE),
"expression": re.compile(r"\[\$\](.+?)\[/\$\]", re.DOTALL | re.IGNORECASE),
"math": re.compile(r"\[\$\$\](.+?)\[/\$\$\]", re.DOTALL | re.IGNORECASE),
}
# add standard tex install location to osx
if isMac:
os.environ['PATH'] += ":/usr/texbin:/Library/TeX/texbin"
def stripLatex(text):
for match in regexps['standard'].finditer(text):
text = text.replace(match.group(), "")
for match in regexps['expression'].finditer(text):
text = text.replace(match.group(), "")
for match in regexps['math'].finditer(text):
text = text.replace(match.group(), "")
return text
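# Illustrative usage (not part of the original file):
#   stripLatex(u"x [$]a^2[/$] y") -> u"x  y"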
def mungeQA(html, type, fields, model, data, col):
"Convert TEXT with embedded latex tags to image links."
for match in regexps['standard'].finditer(html):
html = html.replace(match.group(), _imgLink(col, match.group(1), model))
for match in regexps['expression'].finditer(html):
html = html.replace(match.group(), _imgLink(
col, "$" + match.group(1) + "$", model))
for match in regexps['math'].finditer(html):
html = html.replace(match.group(), _imgLink(
col,
"\\begin{displaymath}" + match.group(1) + "\\end{displaymath}", model))
return html
def _imgLink(col, latex, model):
"Return an img link for LATEX, creating if necesssary."
txt = _latexFromHtml(col, latex)
fname = "latex-%s.png" % checksum(txt.encode("utf8"))
link = '<img class=latex src="%s">' % fname
if os.path.exists(fname):
return link
elif not build:
return u"[latex]%s[/latex]" % latex
else:
err = _buildImg(col, txt, fname, model)
if err:
return err
else:
return link
def _latexFromHtml(col, latex):
"Convert entities and fix newlines."
latex = re.sub("<br( /)?>|<div>", "\n", latex)
latex = stripHTML(latex)
return latex
def _buildImg(col, latex, fname, model):
# add header/footer & convert to utf8
latex = (model["latexPre"] + "\n" +
latex + "\n" +
model["latexPost"])
latex = latex.encode("utf8")
# it's only really secure if run in a jail, but these are the most common
tmplatex = latex.replace("\\includegraphics", "")
for bad in ("\\write18", "\\readline", "\\input", "\\include",
"\\catcode", "\\openout", "\\write", "\\loop",
"\\def", "\\shipout"):
# don't mind if the sequence is only part of a command
bad_re = "\\" + bad + "[^a-zA-Z]"
if re.search(bad_re, tmplatex):
return _("""\
For security reasons, '%s' is not allowed on cards. You can still use \
it by placing the command in a different package, and importing that \
package in the LaTeX header instead.""") % bad
# write into a temp file
log = open(namedtmp("latex_log.txt"), "w")
texpath = namedtmp("tmp.tex")
texfile = file(texpath, "w")
texfile.write(latex)
texfile.close()
mdir = col.media.dir()
oldcwd = os.getcwd()
png = namedtmp("tmp.png")
try:
# generate png
os.chdir(tmpdir())
for latexCmd in latexCmds:
if call(latexCmd, stdout=log, stderr=log):
return _errMsg(latexCmd[0], texpath)
# add to media
shutil.copyfile(png, os.path.join(mdir, fname))
return
finally:
os.chdir(oldcwd)
def _errMsg(type, texpath):
msg = (_("Error executing %s.") % type) + "<br>"
msg += (_("Generated file: %s") % texpath) + "<br>"
try:
log = open(namedtmp("latex_log.txt", rm=False)).read()
if not log:
raise Exception()
msg += "<small><pre>" + cgi.escape(log) + "</pre></small>"
except:
msg += _("Have you installed latex and dvipng?")
pass
return msg
# setup q/a filter
addHook("mungeQA", mungeQA)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/latex.py
|
latex.py
|
from __future__ import division
import time
import datetime
import json
import anki.js
from anki.utils import fmtTimeSpan, ids2str
from anki.lang import _, ngettext
# Card stats
##########################################################################
class CardStats(object):
def __init__(self, col, card):
self.col = col
self.card = card
def report(self):
c = self.card
fmt = lambda x, **kwargs: fmtTimeSpan(x, short=True, **kwargs)
self.txt = "<table width=100%>"
self.addLine(_("Added"), self.date(c.id/1000))
first = self.col.db.scalar(
"select min(id) from revlog where cid = ?", c.id)
last = self.col.db.scalar(
"select max(id) from revlog where cid = ?", c.id)
if first:
self.addLine(_("First Review"), self.date(first/1000))
self.addLine(_("Latest Review"), self.date(last/1000))
if c.type in (1,2):
if c.odid or c.queue < 0:
next = None
else:
if c.queue in (2,3):
next = time.time()+((c.due - self.col.sched.today)*86400)
else:
next = c.due
next = self.date(next)
if next:
self.addLine(_("Due"), next)
if c.queue == 2:
self.addLine(_("Interval"), fmt(c.ivl * 86400))
self.addLine(_("Ease"), "%d%%" % (c.factor/10.0))
self.addLine(_("Reviews"), "%d" % c.reps)
self.addLine(_("Lapses"), "%d" % c.lapses)
(cnt, total) = self.col.db.first(
"select count(), sum(time)/1000 from revlog where cid = :id",
id=c.id)
if cnt:
self.addLine(_("Average Time"), self.time(total / float(cnt)))
self.addLine(_("Total Time"), self.time(total))
elif c.queue == 0:
self.addLine(_("Position"), c.due)
self.addLine(_("Card Type"), c.template()['name'])
self.addLine(_("Note Type"), c.model()['name'])
self.addLine(_("Deck"), self.col.decks.name(c.did))
self.addLine(_("Note ID"), c.nid)
self.addLine(_("Card ID"), c.id)
self.txt += "</table>"
return self.txt
def addLine(self, k, v):
self.txt += self.makeLine(k, v)
def makeLine(self, k, v):
txt = "<tr><td align=left style='padding-right: 3px;'>"
txt += "<b>%s</b></td><td>%s</td></tr>" % (k, v)
return txt
def date(self, tm):
return time.strftime("%Y-%m-%d", time.localtime(tm))
def time(self, tm):
str = ""
if tm >= 60:
str = fmtTimeSpan((tm/60)*60, short=True, point=-1, unit=1)
if tm%60 != 0 or not str:
str += fmtTimeSpan(tm%60, point=2 if not str else -1, short=True)
return str
# Collection stats
##########################################################################
colYoung = "#7c7"
colMature = "#070"
colCum = "rgba(0,0,0,0.9)"
colLearn = "#00F"
colRelearn = "#c00"
colCram = "#ff0"
colIvl = "#077"
colHour = "#ccc"
colTime = "#770"
colUnseen = "#000"
colSusp = "#ff0"
class CollectionStats(object):
def __init__(self, col):
self.col = col
self._stats = None
self.type = 0
self.width = 600
self.height = 200
self.wholeCollection = False
def report(self, type=0):
# 0=days, 1=weeks, 2=months
self.type = type
from statsbg import bg
txt = self.css % bg
txt += self.todayStats()
txt += self.dueGraph()
txt += self.repsGraph()
txt += self.introductionGraph()
txt += self.ivlGraph()
txt += self.hourGraph()
txt += self.easeGraph()
txt += self.cardGraph()
txt += self.footer()
return "<script>%s\n</script><center>%s</center>" % (
anki.js.jquery+anki.js.plot, txt)
css = """
<style>
h1 { margin-bottom: 0; margin-top: 1em; }
.pielabel { text-align:center; padding:0px; color:white; }
body {background-image: url(data:image/png;base64,%s); }
</style>
"""
# Today stats
######################################################################
def todayStats(self):
b = self._title(_("Today"))
# studied today
lim = self._revlogLimit()
if lim:
lim = " and " + lim
cards, thetime, failed, lrn, rev, relrn, filt = self.col.db.first("""
select count(), sum(time)/1000,
sum(case when ease = 1 then 1 else 0 end), /* failed */
sum(case when type = 0 then 1 else 0 end), /* learning */
sum(case when type = 1 then 1 else 0 end), /* review */
sum(case when type = 2 then 1 else 0 end), /* relearn */
sum(case when type = 3 then 1 else 0 end) /* filter */
from revlog where id > ? """+lim, (self.col.sched.dayCutoff-86400)*1000)
cards = cards or 0
thetime = thetime or 0
failed = failed or 0
lrn = lrn or 0
rev = rev or 0
relrn = relrn or 0
filt = filt or 0
# studied
def bold(s):
return "<b>"+unicode(s)+"</b>"
msgp1 = ngettext("<!--studied-->%d card", "<!--studied-->%d cards", cards) % cards
b += _("Studied %(a)s in %(b)s today.") % dict(
a=bold(msgp1), b=bold(fmtTimeSpan(thetime, unit=1)))
# again/pass count
b += "<br>" + _("Again count: %s") % bold(failed)
if cards:
b += " " + _("(%s correct)") % bold(
"%0.1f%%" %((1-failed/float(cards))*100))
# type breakdown
b += "<br>"
b += (_("Learn: %(a)s, Review: %(b)s, Relearn: %(c)s, Filtered: %(d)s")
% dict(a=bold(lrn), b=bold(rev), c=bold(relrn), d=bold(filt)))
# mature today
mcnt, msum = self.col.db.first("""
select count(), sum(case when ease = 1 then 0 else 1 end) from revlog
where lastIvl >= 21 and id > ?"""+lim, (self.col.sched.dayCutoff-86400)*1000)
b += "<br>"
if mcnt:
b += _("Correct answers on mature cards: %(a)d/%(b)d (%(c).1f%%)") % dict(
a=msum, b=mcnt, c=(msum / float(mcnt) * 100))
else:
b += _("No mature cards were studied today.")
return b
# Due and cumulative due
######################################################################
def dueGraph(self):
if self.type == 0:
start = 0; end = 31; chunk = 1;
elif self.type == 1:
start = 0; end = 52; chunk = 7
elif self.type == 2:
start = 0; end = None; chunk = 30
d = self._due(start, end, chunk)
yng = []
mtr = []
tot = 0
totd = []
for day in d:
yng.append((day[0], day[1]))
mtr.append((day[0], day[2]))
tot += day[1]+day[2]
totd.append((day[0], tot))
data = [
dict(data=mtr, color=colMature, label=_("Mature")),
dict(data=yng, color=colYoung, label=_("Young")),
]
if len(totd) > 1:
data.append(
dict(data=totd, color=colCum, label=_("Cumulative"), yaxis=2,
bars={'show': False}, lines=dict(show=True), stack=False))
txt = self._title(
_("Forecast"),
_("The number of reviews due in the future."))
xaxis = dict(tickDecimals=0, min=-0.5)
if end is not None:
xaxis['max'] = end-0.5
txt += self._graph(id="due", data=data,
ylabel2=_("Cumulative Cards"), conf=dict(
xaxis=xaxis, yaxes=[dict(min=0), dict(
min=0, tickDecimals=0, position="right")]))
txt += self._dueInfo(tot, len(totd)*chunk)
return txt
def _dueInfo(self, tot, num):
i = []
self._line(i, _("Total"), ngettext("%d review", "%d reviews", tot) % tot)
self._line(i, _("Average"), self._avgDay(
tot, num, _("reviews")))
tomorrow = self.col.db.scalar("""
select count() from cards where did in %s and queue in (2,3)
and due = ?""" % self._limit(), self.col.sched.today+1)
tomorrow = ngettext("%d card", "%d cards", tomorrow) % tomorrow
self._line(i, _("Due tomorrow"), tomorrow)
return self._lineTbl(i)
def _due(self, start=None, end=None, chunk=1):
lim = ""
if start is not None:
lim += " and due-:today >= %d" % start
if end is not None:
lim += " and day < %d" % end
return self.col.db.all("""
select (due-:today)/:chunk as day,
sum(case when ivl < 21 then 1 else 0 end), -- yng
sum(case when ivl >= 21 then 1 else 0 end) -- mtr
from cards
where did in %s and queue in (2,3)
%s
group by day order by day""" % (self._limit(), lim),
today=self.col.sched.today,
chunk=chunk)
# Added, reps and time spent
######################################################################
def introductionGraph(self):
if self.type == 0:
days = 30; chunk = 1
elif self.type == 1:
days = 52; chunk = 7
else:
days = None; chunk = 30
return self._introductionGraph(self._added(days, chunk),
days, _("Added"))
def _introductionGraph(self, data, days, title):
if not data:
return ""
d = data
conf = dict(
xaxis=dict(tickDecimals=0, max=0.5),
yaxes=[dict(min=0), dict(position="right",min=0)])
if days is not None:
conf['xaxis']['min'] = -days+0.5
def plot(id, data, ylabel, ylabel2):
return self._graph(
id, data=data, conf=conf, ylabel=ylabel, ylabel2=ylabel2)
# graph
(repdata, repsum) = self._splitRepData(d, ((1, colLearn, ""),))
txt = self._title(
title, _("The number of new cards you have added."))
txt += plot("intro", repdata, ylabel=_("Cards"), ylabel2=_("Cumulative Cards"))
# total and per day average
tot = sum([i[1] for i in d])
period = self._periodDays()
if not period:
# base off date of earliest added card
period = self._deckAge('add')
i = []
self._line(i, _("Total"), ngettext("%d card", "%d cards", tot) % tot)
self._line(i, _("Average"), self._avgDay(tot, period, _("cards")))
txt += self._lineTbl(i)
return txt
def repsGraph(self):
if self.type == 0:
days = 30; chunk = 1
elif self.type == 1:
days = 52; chunk = 7
else:
days = None; chunk = 30
return self._repsGraph(self._done(days, chunk),
days,
_("Review Count"),
_("Review Time"))
def _repsGraph(self, data, days, reptitle, timetitle):
if not data:
return ""
d = data
conf = dict(
xaxis=dict(tickDecimals=0, max=0.5),
yaxes=[dict(min=0), dict(position="right",min=0)])
if days is not None:
conf['xaxis']['min'] = -days+0.5
def plot(id, data, ylabel, ylabel2):
return self._graph(
id, data=data, conf=conf, ylabel=ylabel, ylabel2=ylabel2)
# reps
(repdata, repsum) = self._splitRepData(d, (
(3, colMature, _("Mature")),
(2, colYoung, _("Young")),
(4, colRelearn, _("Relearn")),
(1, colLearn, _("Learn")),
(5, colCram, _("Cram"))))
txt = self._title(
reptitle, _("The number of questions you have answered."))
txt += plot("reps", repdata, ylabel=_("Answers"), ylabel2=_(
"Cumulative Answers"))
(daysStud, fstDay) = self._daysStudied()
rep, tot = self._ansInfo(repsum, daysStud, fstDay, _("reviews"))
txt += rep
# time
(timdata, timsum) = self._splitRepData(d, (
(8, colMature, _("Mature")),
(7, colYoung, _("Young")),
(9, colRelearn, _("Relearn")),
(6, colLearn, _("Learn")),
(10, colCram, _("Cram"))))
if self.type == 0:
t = _("Minutes")
convHours = False
else:
t = _("Hours")
convHours = True
txt += self._title(timetitle, _("The time taken to answer the questions."))
txt += plot("time", timdata, ylabel=t, ylabel2=_("Cumulative %s") % t)
rep, tot2 = self._ansInfo(
timsum, daysStud, fstDay, _("minutes"), convHours, total=tot)
txt += rep
return txt
def _ansInfo(self, totd, studied, first, unit, convHours=False, total=None):
if not totd:
return
tot = totd[-1][1]
period = self._periodDays()
if not period:
# base off earliest repetition date
period = self._deckAge('review')
i = []
self._line(i, _("Days studied"),
_("<b>%(pct)d%%</b> (%(x)s of %(y)s)") % dict(
x=studied, y=period, pct=studied/float(period)*100),
bold=False)
if convHours:
tunit = _("hours")
else:
tunit = unit
self._line(i, _("Total"), _("%(tot)s %(unit)s") % dict(
unit=tunit, tot=int(tot)))
if convHours:
# convert to minutes
tot *= 60
self._line(i, _("Average for days studied"), self._avgDay(
tot, studied, unit))
if studied != period:
# don't display if you did study every day
self._line(i, _("If you studied every day"), self._avgDay(
tot, period, unit))
if total and tot:
perMin = total / float(tot)
perMin = round(perMin, 1)
# don't round down to zero
if perMin < 0.1:
text = _("less than 0.1 cards/minute")
else:
text = _("%.01f cards/minute") % perMin
self._line(
i, _("Average answer time"),
_("%(a)0.1fs (%(b)s)") % dict(a=(tot*60)/total, b=text))
return self._lineTbl(i), int(tot)
def _splitRepData(self, data, spec):
sep = {}
totcnt = {}
totd = {}
alltot = []
allcnt = 0
for (n, col, lab) in spec:
totcnt[n] = 0
totd[n] = []
for row in data:
for (n, col, lab) in spec:
if n not in sep:
sep[n] = []
sep[n].append((row[0], row[n]))
totcnt[n] += row[n]
allcnt += row[n]
totd[n].append((row[0], totcnt[n]))
alltot.append((row[0], allcnt))
ret = []
for (n, col, lab) in spec:
if len(totd[n]) and totcnt[n]:
# bars
ret.append(dict(data=sep[n], color=col, label=lab))
# lines
ret.append(dict(
data=totd[n], color=col, label=None, yaxis=2,
bars={'show': False}, lines=dict(show=True), stack=-n))
return (ret, alltot)
def _added(self, num=7, chunk=1):
lims = []
if num is not None:
lims.append("id > %d" % (
(self.col.sched.dayCutoff-(num*chunk*86400))*1000))
lims.append("did in %s" % self._limit())
if lims:
lim = "where " + " and ".join(lims)
else:
lim = ""
if self.type == 0:
tf = 60.0 # minutes
else:
tf = 3600.0 # hours
return self.col.db.all("""
select
(cast((id/1000.0 - :cut) / 86400.0 as int))/:chunk as day,
count(id)
from cards %s
group by day order by day""" % lim, cut=self.col.sched.dayCutoff,tf=tf, chunk=chunk)
def _done(self, num=7, chunk=1):
lims = []
if num is not None:
lims.append("id > %d" % (
(self.col.sched.dayCutoff-(num*chunk*86400))*1000))
lim = self._revlogLimit()
if lim:
lims.append(lim)
if lims:
lim = "where " + " and ".join(lims)
else:
lim = ""
if self.type == 0:
tf = 60.0 # minutes
else:
tf = 3600.0 # hours
return self.col.db.all("""
select
(cast((id/1000.0 - :cut) / 86400.0 as int))/:chunk as day,
sum(case when type = 0 then 1 else 0 end), -- lrn count
sum(case when type = 1 and lastIvl < 21 then 1 else 0 end), -- yng count
sum(case when type = 1 and lastIvl >= 21 then 1 else 0 end), -- mtr count
sum(case when type = 2 then 1 else 0 end), -- lapse count
sum(case when type = 3 then 1 else 0 end), -- cram count
sum(case when type = 0 then time/1000.0 else 0 end)/:tf, -- lrn time
-- yng + mtr time
sum(case when type = 1 and lastIvl < 21 then time/1000.0 else 0 end)/:tf,
sum(case when type = 1 and lastIvl >= 21 then time/1000.0 else 0 end)/:tf,
sum(case when type = 2 then time/1000.0 else 0 end)/:tf, -- lapse time
sum(case when type = 3 then time/1000.0 else 0 end)/:tf -- cram time
from revlog %s
group by day order by day""" % lim,
cut=self.col.sched.dayCutoff,
tf=tf,
chunk=chunk)
def _daysStudied(self):
lims = []
num = self._periodDays()
if num:
lims.append(
"id > %d" %
((self.col.sched.dayCutoff-(num*86400))*1000))
rlim = self._revlogLimit()
if rlim:
lims.append(rlim)
if lims:
lim = "where " + " and ".join(lims)
else:
lim = ""
return self.col.db.first("""
select count(), abs(min(day)) from (select
(cast((id/1000 - :cut) / 86400.0 as int)+1) as day
from revlog %s
group by day order by day)""" % lim,
cut=self.col.sched.dayCutoff)
# Intervals
######################################################################
def ivlGraph(self):
(ivls, all, avg, max_) = self._ivls()
tot = 0
totd = []
if not ivls or not all:
return ""
for (grp, cnt) in ivls:
tot += cnt
totd.append((grp, tot/float(all)*100))
if self.type == 0:
ivlmax = 31
elif self.type == 1:
ivlmax = 52
else:
ivlmax = max(5, ivls[-1][0])
txt = self._title(_("Intervals"),
_("Delays until reviews are shown again."))
txt += self._graph(id="ivl", ylabel2=_("Percentage"), data=[
dict(data=ivls, color=colIvl),
dict(data=totd, color=colCum, yaxis=2,
bars={'show': False}, lines=dict(show=True), stack=False)
], conf=dict(
xaxis=dict(min=-0.5, max=ivlmax+0.5),
yaxes=[dict(), dict(position="right", max=105)]))
i = []
self._line(i, _("Average interval"), fmtTimeSpan(avg*86400))
self._line(i, _("Longest interval"), fmtTimeSpan(max_*86400))
return txt + self._lineTbl(i)
def _ivls(self):
if self.type == 0:
chunk = 1; lim = " and grp <= 30"
elif self.type == 1:
chunk = 7; lim = " and grp <= 52"
else:
chunk = 30; lim = ""
data = [self.col.db.all("""
select ivl / :chunk as grp, count() from cards
where did in %s and queue = 2 %s
group by grp
order by grp""" % (self._limit(), lim), chunk=chunk)]
return data + list(self.col.db.first("""
select count(), avg(ivl), max(ivl) from cards where did in %s and queue = 2""" %
self._limit()))
# Eases
######################################################################
def easeGraph(self):
# 3 + 4 + 4 + spaces on sides and middle = 15
# yng starts at 1+3+1 = 5
# mtr starts at 5+4+1 = 10
d = {'lrn':[], 'yng':[], 'mtr':[]}
types = ("lrn", "yng", "mtr")
eases = self._eases()
for (type, ease, cnt) in eases:
if type == 1:
ease += 5
elif type == 2:
ease += 10
n = types[type]
d[n].append((ease, cnt))
ticks = [[1,1],[2,2],[3,3],
[6,1],[7,2],[8,3],[9,4],
[11, 1],[12,2],[13,3],[14,4]]
txt = self._title(_("Answer Buttons"),
_("The number of times you have pressed each button."))
txt += self._graph(id="ease", data=[
dict(data=d['lrn'], color=colLearn, label=_("Learning")),
dict(data=d['yng'], color=colYoung, label=_("Young")),
dict(data=d['mtr'], color=colMature, label=_("Mature")),
], type="barsLine", conf=dict(
xaxis=dict(ticks=ticks, min=0, max=15)),
ylabel=_("Answers"))
txt += self._easeInfo(eases)
return txt
def _easeInfo(self, eases):
        types = {0: [0, 0], 1: [0, 0], 2: [0, 0]}
for (type, ease, cnt) in eases:
if ease == 1:
types[type][0] += cnt
else:
types[type][1] += cnt
i = []
for type in range(3):
(bad, good) = types[type]
tot = bad + good
try:
pct = good / float(tot) * 100
            except ZeroDivisionError:
pct = 0
i.append(_(
"Correct: <b>%(pct)0.2f%%</b><br>(%(good)d of %(tot)d)") % dict(
pct=pct, good=good, tot=tot))
return ("""
<center><table width=%dpx><tr><td width=50></td><td align=center>""" % self.width +
"</td><td align=center>".join(i) +
"</td></tr></table></center>")
def _eases(self):
lims = []
lim = self._revlogLimit()
if lim:
lims.append(lim)
if self.type == 0:
days = 30
elif self.type == 1:
days = 365
else:
days = None
if days is not None:
lims.append("id > %d" % (
(self.col.sched.dayCutoff-(days*86400))*1000))
if lims:
lim = "where " + " and ".join(lims)
else:
lim = ""
return self.col.db.all("""
select (case
when type in (0,2) then 0
when lastIvl < 21 then 1
else 2 end) as thetype,
(case when type in (0,2) and ease = 4 then 3 else ease end), count() from revlog %s
group by thetype, ease
order by thetype, ease""" % lim)
# Hourly retention
######################################################################
def hourGraph(self):
data = self._hourRet()
if not data:
return ""
shifted = []
counts = []
mcount = 0
trend = []
peak = 0
for d in data:
hour = (d[0] - 4) % 24
pct = d[1]
if pct > peak:
peak = pct
shifted.append((hour, pct))
counts.append((hour, d[2]))
if d[2] > mcount:
mcount = d[2]
shifted.sort()
counts.sort()
if len(counts) < 4:
return ""
for d in shifted:
hour = d[0]
pct = d[1]
if not trend:
trend.append((hour, pct))
else:
prev = trend[-1][1]
diff = pct-prev
diff /= 3.0
diff = round(diff, 1)
trend.append((hour, prev+diff))
txt = self._title(_("Hourly Breakdown"),
_("Review success rate for each hour of the day."))
txt += self._graph(id="hour", data=[
dict(data=shifted, color=colCum, label=_("% Correct")),
dict(data=counts, color=colHour, label=_("Answers"), yaxis=2,
bars=dict(barWidth=0.2), stack=False)
], conf=dict(
xaxis=dict(ticks=[[0, _("4AM")], [6, _("10AM")],
[12, _("4PM")], [18, _("10PM")], [23, _("3AM")]]),
yaxes=[dict(max=peak), dict(position="right", max=mcount)]),
ylabel=_("% Correct"), ylabel2=_("Reviews"))
txt += _("Hours with less than 30 reviews are not shown.")
return txt
def _hourRet(self):
lim = self._revlogLimit()
if lim:
lim = " and " + lim
sd = datetime.datetime.fromtimestamp(self.col.crt)
pd = self._periodDays()
if pd:
lim += " and id > %d" % ((self.col.sched.dayCutoff-(86400*pd))*1000)
return self.col.db.all("""
select
23 - ((cast((:cut - id/1000) / 3600.0 as int)) %% 24) as hour,
sum(case when ease = 1 then 0 else 1 end) /
cast(count() as float) * 100,
count()
from revlog where type in (0,1,2) %s
group by hour having count() > 30 order by hour""" % lim,
cut=self.col.sched.dayCutoff-(sd.hour*3600))
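    # Illustrative: :cut is pulled back by the collection's creation hour,
    # so (:cut - id/1000)/3600 counts whole hours before the cutoff and
    # 23 - (that % 24) recovers the local hour a review happened; hourGraph
    # then rotates the axis by 4 so the day starts at 4AM.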
# Cards
######################################################################
def cardGraph(self):
# graph data
div = self._cards()
d = []
for c, (t, col) in enumerate((
(_("Mature"), colMature),
(_("Young+Learn"), colYoung),
(_("Unseen"), colUnseen),
(_("Suspended+Buried"), colSusp))):
d.append(dict(data=div[c], label="%s: %s" % (t, div[c]), color=col))
# text data
i = []
(c, f) = self.col.db.first("""
select count(id), count(distinct nid) from cards
where did in %s """ % self._limit())
self._line(i, _("Total cards"), c)
self._line(i, _("Total notes"), f)
(low, avg, high) = self._factors()
if low:
self._line(i, _("Lowest ease"), "%d%%" % low)
self._line(i, _("Average ease"), "%d%%" % avg)
self._line(i, _("Highest ease"), "%d%%" % high)
info = "<table width=100%>" + "".join(i) + "</table><p>"
info += _('''\
A card's <i>ease</i> is the size of the next interval \
when you answer "good" on a review.''')
txt = self._title(_("Cards Types"),
_("The division of cards in your deck(s)."))
txt += "<table width=%d><tr><td>%s</td><td>%s</td></table>" % (
self.width,
self._graph(id="cards", data=d, type="pie"),
info)
return txt
def _line(self, i, a, b, bold=True):
colon = _(":")
if bold:
i.append(("<tr><td width=200 align=right>%s%s</td><td><b>%s</b></td></tr>") % (a,colon,b))
else:
i.append(("<tr><td width=200 align=right>%s%s</td><td>%s</td></tr>") % (a,colon,b))
def _lineTbl(self, i):
return "<table width=400>" + "".join(i) + "</table>"
def _factors(self):
return self.col.db.first("""
select
min(factor) / 10.0,
avg(factor) / 10.0,
max(factor) / 10.0
from cards where did in %s and queue = 2""" % self._limit())
def _cards(self):
return self.col.db.first("""
select
sum(case when queue=2 and ivl >= 21 then 1 else 0 end), -- mtr
sum(case when queue in (1,3) or (queue=2 and ivl < 21) then 1 else 0 end), -- yng/lrn
sum(case when queue=0 then 1 else 0 end), -- new
sum(case when queue<0 then 1 else 0 end) -- susp
from cards where did in %s""" % self._limit())
# Footer
######################################################################
def footer(self):
b = "<br><br><font size=1>"
b += _("Generated on %s") % time.asctime(time.localtime(time.time()))
b += "<br>"
if self.wholeCollection:
deck = _("whole collection")
else:
deck = self.col.decks.current()['name']
b += _("Scope: %s") % deck
b += "<br>"
b += _("Period: %s") % [
_("1 month"),
_("1 year"),
_("deck life")
][self.type]
return b
# Tools
######################################################################
def _graph(self, id, data, conf={},
type="bars", ylabel=_("Cards"), timeTicks=True, ylabel2=""):
# display settings
if type == "pie":
conf['legend'] = {'container': "#%sLegend" % id, 'noColumns':2}
else:
conf['legend'] = {'container': "#%sLegend" % id, 'noColumns':10}
conf['series'] = dict(stack=True)
if not 'yaxis' in conf:
conf['yaxis'] = {}
conf['yaxis']['labelWidth'] = 40
if 'xaxis' not in conf:
conf['xaxis'] = {}
if timeTicks:
conf['timeTicks'] = (_("d"), _("w"), _("mo"))[self.type]
# types
width = self.width
height = self.height
if type == "bars":
conf['series']['bars'] = dict(
show=True, barWidth=0.8, align="center", fill=0.7, lineWidth=0)
elif type == "barsLine":
conf['series']['bars'] = dict(
show=True, barWidth=0.8, align="center", fill=0.7, lineWidth=3)
elif type == "fill":
conf['series']['lines'] = dict(show=True, fill=True)
elif type == "pie":
width /= 2.3
height *= 1.5
ylabel = ""
conf['series']['pie'] = dict(
show=True,
radius=1,
stroke=dict(color="#fff", width=5),
label=dict(
show=True,
radius=0.8,
threshold=0.01,
background=dict(
opacity=0.5,
color="#000"
)))
#conf['legend'] = dict(show=False)
return (
"""
<table cellpadding=0 cellspacing=10>
<tr>
<td><div style="width: 150px; text-align: center; position:absolute;
-webkit-transform: rotate(-90deg) translateY(-85px);
font-weight: bold;
">%(ylab)s</div></td>
<td>
<center><div id=%(id)sLegend></div></center>
<div id="%(id)s" style="width:%(w)spx; height:%(h)spx;"></div>
</td>
<td><div style="width: 150px; text-align: center; position:absolute;
-webkit-transform: rotate(90deg) translateY(65px);
font-weight: bold;
">%(ylab2)s</div></td>
</tr></table>
<script>
$(function () {
var conf = %(conf)s;
if (conf.timeTicks) {
conf.xaxis.tickFormatter = function (val, axis) {
return val.toFixed(0)+conf.timeTicks;
}
}
conf.yaxis.minTickSize = 1;
conf.yaxis.tickFormatter = function (val, axis) {
return val.toFixed(0);
}
if (conf.series.pie) {
conf.series.pie.label.formatter = function(label, series){
return '<div class=pielabel>'+Math.round(series.percent)+'%%</div>';
};
}
$.plot($("#%(id)s"), %(data)s, conf);
});
</script>""" % dict(
id=id, w=width, h=height,
ylab=ylabel, ylab2=ylabel2,
data=json.dumps(data), conf=json.dumps(conf)))
def _limit(self):
if self.wholeCollection:
return ids2str([d['id'] for d in self.col.decks.all()])
return self.col.sched._deckLimit()
def _revlogLimit(self):
if self.wholeCollection:
return ""
return ("cid in (select id from cards where did in %s)" %
ids2str(self.col.decks.active()))
def _title(self, title, subtitle=""):
return '<h1>%s</h1>%s' % (title, subtitle)
def _deckAge(self, by):
lim = self._revlogLimit()
if lim:
lim = " where " + lim
if by == 'review':
t = self.col.db.scalar("select id from revlog %s order by id limit 1" % lim)
elif by == 'add':
lim = "where did in %s" % ids2str(self.col.decks.active())
t = self.col.db.scalar("select id from cards %s order by id limit 1" % lim)
if not t:
period = 1
else:
period = max(
1, int(1+((self.col.sched.dayCutoff - (t/1000)) / 86400)))
return period
def _periodDays(self):
if self.type == 0:
return 30
elif self.type == 1:
return 365
else:
return None
def _avgDay(self, tot, num, unit):
vals = []
try:
vals.append(_("%(a)0.1f %(b)s/day") % dict(a=tot/float(num), b=unit))
return ", ".join(vals)
except ZeroDivisionError:
return ""
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/stats.py
|
stats.py
|
from __future__ import division
import time
import random
import itertools
from operator import itemgetter
from heapq import *
#from anki.cards import Card
from anki.utils import ids2str, intTime, fmtTimeSpan
from anki.lang import _
from anki.consts import *
from anki.hooks import runHook
# queue types: 0=new/cram, 1=lrn, 2=rev, 3=day lrn, -1=suspended, -2=buried
# revlog types: 0=lrn, 1=rev, 2=relrn, 3=cram
# positive revlog intervals are in days (rev), negative in seconds (lrn)
class Scheduler(object):
name = "std"
haveCustomStudy = True
_spreadRev = True
_burySiblingsOnAnswer = True
def __init__(self, col):
self.col = col
self.queueLimit = 50
self.reportLimit = 1000
self.reps = 0
self.today = None
self._haveQueues = False
self._updateCutoff()
def getCard(self):
"Pop the next card from the queue. None if finished."
self._checkDay()
if not self._haveQueues:
self.reset()
card = self._getCard()
if card:
self.col.log(card)
if not self._burySiblingsOnAnswer:
self._burySiblings(card)
self.reps += 1
card.startTimer()
return card
def reset(self):
self._updateCutoff()
self._resetLrn()
self._resetRev()
self._resetNew()
self._haveQueues = True
def answerCard(self, card, ease):
self.col.log()
assert ease >= 1 and ease <= 4
self.col.markReview(card)
if self._burySiblingsOnAnswer:
self._burySiblings(card)
card.reps += 1
# former is for logging new cards, latter also covers filt. decks
card.wasNew = card.type == 0
wasNewQ = card.queue == 0
if wasNewQ:
# came from the new queue, move to learning
card.queue = 1
# if it was a new card, it's now a learning card
if card.type == 0:
card.type = 1
# init reps to graduation
card.left = self._startingLeft(card)
# dynamic?
if card.odid and card.type == 2:
if self._resched(card):
# reviews get their ivl boosted on first sight
card.ivl = self._dynIvlBoost(card)
card.odue = self.today + card.ivl
self._updateStats(card, 'new')
if card.queue in (1, 3):
self._answerLrnCard(card, ease)
if not wasNewQ:
self._updateStats(card, 'lrn')
elif card.queue == 2:
self._answerRevCard(card, ease)
self._updateStats(card, 'rev')
else:
raise Exception("Invalid queue")
self._updateStats(card, 'time', card.timeTaken())
card.mod = intTime()
card.usn = self.col.usn()
card.flushSched()
def counts(self, card=None):
counts = [self.newCount, self.lrnCount, self.revCount]
if card:
idx = self.countIdx(card)
if idx == 1:
counts[1] += card.left // 1000
else:
counts[idx] += 1
return tuple(counts)
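    # card.left packs two values: the low three digits are the remaining
    # learning steps and left // 1000 is how many of them can still be
    # completed before today's cutoff, hence the left // 1000 adjustments
    # in counts() and throughout the learning-queue code.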
def dueForecast(self, days=7):
"Return counts over next DAYS. Includes today."
daysd = dict(self.col.db.all("""
select due, count() from cards
where did in %s and queue = 2
and due between ? and ?
group by due
order by due""" % self._deckLimit(),
self.today,
self.today+days-1))
for d in range(days):
d = self.today+d
if d not in daysd:
daysd[d] = 0
# return in sorted order
ret = [x[1] for x in sorted(daysd.items())]
return ret
def countIdx(self, card):
if card.queue == 3:
return 1
return card.queue
def answerButtons(self, card):
if card.odue:
# normal review in dyn deck?
if card.odid and card.queue == 2:
return 4
conf = self._lrnConf(card)
if card.type in (0,1) or len(conf['delays']) > 1:
return 3
return 2
elif card.queue == 2:
return 4
else:
return 3
def unburyCards(self):
"Unbury cards."
self.col.conf['lastUnburied'] = self.today
self.col.log(
self.col.db.list("select id from cards where queue = -2"))
self.col.db.execute(
"update cards set queue=type where queue = -2")
def unburyCardsForDeck(self):
sids = ids2str(self.col.decks.active())
self.col.log(
self.col.db.list("select id from cards where queue = -2 and did in %s"
% sids))
self.col.db.execute(
"update cards set mod=?,usn=?,queue=type where queue = -2 and did in %s"
% sids, intTime(), self.col.usn())
# Rev/lrn/time daily stats
##########################################################################
def _updateStats(self, card, type, cnt=1):
key = type+"Today"
for g in ([self.col.decks.get(card.did)] +
self.col.decks.parents(card.did)):
# add
g[key][1] += cnt
self.col.decks.save(g)
def extendLimits(self, new, rev):
cur = self.col.decks.current()
parents = self.col.decks.parents(cur['id'])
children = [self.col.decks.get(did) for (name, did) in
self.col.decks.children(cur['id'])]
for g in [cur] + parents + children:
# add
g['newToday'][1] -= new
g['revToday'][1] -= rev
self.col.decks.save(g)
def _walkingCount(self, limFn=None, cntFn=None):
tot = 0
pcounts = {}
# for each of the active decks
for did in self.col.decks.active():
# early alphas were setting the active ids as a str
did = int(did)
# get the individual deck's limit
lim = limFn(self.col.decks.get(did))
if not lim:
continue
# check the parents
parents = self.col.decks.parents(did)
for p in parents:
# add if missing
if p['id'] not in pcounts:
pcounts[p['id']] = limFn(p)
# take minimum of child and parent
lim = min(pcounts[p['id']], lim)
# see how many cards we actually have
cnt = cntFn(did, lim)
# if non-zero, decrement from parent counts
for p in parents:
pcounts[p['id']] -= cnt
# we may also be a parent
pcounts[did] = lim - cnt
# and add to running total
tot += cnt
return tot
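    # Worked example (illustrative): with decks A (limit 20) and A::B
    # (limit 15), B is capped at min(15, 20) = 15; if B supplies 15 cards,
    # A's remaining allowance drops to 5, so a sibling A::C with its own
    # limit of 10 may only contribute min(10, 5) = 5 more.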
# Deck list
##########################################################################
def deckDueList(self):
"Returns [deckname, did, rev, lrn, new]"
self._checkDay()
self.col.decks.recoverOrphans()
decks = self.col.decks.all()
decks.sort(key=itemgetter('name'))
lims = {}
data = []
def parent(name):
parts = name.split("::")
if len(parts) < 2:
return None
parts = parts[:-1]
return "::".join(parts)
for deck in decks:
# if we've already seen the exact same deck name, remove the
# invalid duplicate and reload
if deck['name'] in lims:
self.col.decks.rem(deck['id'], cardsToo=False, childrenToo=True)
return self.deckDueList()
p = parent(deck['name'])
# new
nlim = self._deckNewLimitSingle(deck)
if p:
if p not in lims:
# if parent was missing, this deck is invalid, and we
# need to reload the deck list
self.col.decks.rem(deck['id'], cardsToo=False, childrenToo=True)
return self.deckDueList()
nlim = min(nlim, lims[p][0])
new = self._newForDeck(deck['id'], nlim)
# learning
lrn = self._lrnForDeck(deck['id'])
# reviews
rlim = self._deckRevLimitSingle(deck)
if p:
rlim = min(rlim, lims[p][1])
rev = self._revForDeck(deck['id'], rlim)
# save to list
data.append([deck['name'], deck['id'], rev, lrn, new])
# add deck as a parent
lims[deck['name']] = [nlim, rlim]
return data
def deckDueTree(self):
return self._groupChildren(self.deckDueList())
def _groupChildren(self, grps):
# first, split the group names into components
for g in grps:
g[0] = g[0].split("::")
# and sort based on those components
grps.sort(key=itemgetter(0))
# then run main function
return self._groupChildrenMain(grps)
def _groupChildrenMain(self, grps):
tree = []
# group and recurse
def key(grp):
return grp[0][0]
for (head, tail) in itertools.groupby(grps, key=key):
tail = list(tail)
did = None
rev = 0
new = 0
lrn = 0
children = []
for c in tail:
if len(c[0]) == 1:
# current node
did = c[1]
rev += c[2]
lrn += c[3]
new += c[4]
else:
# set new string to tail
c[0] = c[0][1:]
children.append(c)
children = self._groupChildrenMain(children)
# tally up children counts
for ch in children:
rev += ch[2]
lrn += ch[3]
new += ch[4]
# limit the counts to the deck's limits
conf = self.col.decks.confForDid(did)
deck = self.col.decks.get(did)
if not conf['dyn']:
rev = max(0, min(rev, conf['rev']['perDay']-deck['revToday'][1]))
new = max(0, min(new, conf['new']['perDay']-deck['newToday'][1]))
tree.append((head, did, rev, lrn, new, children))
return tuple(tree)
# Getting the next card
##########################################################################
def _getCard(self):
"Return the next due card id, or None."
# learning card due?
c = self._getLrnCard()
if c:
return c
# new first, or time for one?
if self._timeForNewCard():
c = self._getNewCard()
if c:
return c
# card due for review?
c = self._getRevCard()
if c:
return c
# day learning card due?
c = self._getLrnDayCard()
if c:
return c
# new cards left?
c = self._getNewCard()
if c:
return c
# collapse or finish
return self._getLrnCard(collapse=True)
# New cards
##########################################################################
def _resetNewCount(self):
cntFn = lambda did, lim: self.col.db.scalar("""
select count() from (select 1 from cards where
did = ? and queue = 0 limit ?)""", did, lim)
self.newCount = self._walkingCount(self._deckNewLimitSingle, cntFn)
def _resetNew(self):
self._resetNewCount()
self._newDids = self.col.decks.active()[:]
self._newQueue = []
self._updateNewCardRatio()
def _fillNew(self):
if self._newQueue:
return True
if not self.newCount:
return False
while self._newDids:
did = self._newDids[0]
lim = min(self.queueLimit, self._deckNewLimit(did))
if lim:
# fill the queue with the current did
self._newQueue = self.col.db.list("""
select id from cards where did = ? and queue = 0 order by due limit ?""", did, lim)
if self._newQueue:
self._newQueue.reverse()
return True
# nothing left in the deck; move to next
self._newDids.pop(0)
if self.newCount:
# if we didn't get a card but the count is non-zero,
# we need to check again for any cards that were
# removed from the queue but not buried
self._resetNew()
return self._fillNew()
def _getNewCard(self):
if self._fillNew():
self.newCount -= 1
return self.col.getCard(self._newQueue.pop())
def _updateNewCardRatio(self):
if self.col.conf['newSpread'] == NEW_CARDS_DISTRIBUTE:
if self.newCount:
self.newCardModulus = (
(self.newCount + self.revCount) // self.newCount)
# if there are cards to review, ensure modulo >= 2
if self.revCount:
self.newCardModulus = max(2, self.newCardModulus)
return
self.newCardModulus = 0
def _timeForNewCard(self):
"True if it's time to display a new card when distributing."
if not self.newCount:
return False
if self.col.conf['newSpread'] == NEW_CARDS_LAST:
return False
elif self.col.conf['newSpread'] == NEW_CARDS_FIRST:
return True
elif self.newCardModulus:
return self.reps and self.reps % self.newCardModulus == 0
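    # Illustrative: with newCount=20 and revCount=80, newCardModulus is
    # (20+80)//20 = 5, so when new cards are distributed one is shown on
    # every 5th repetition.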
def _deckNewLimit(self, did, fn=None):
if not fn:
fn = self._deckNewLimitSingle
sel = self.col.decks.get(did)
lim = -1
# for the deck and each of its parents
for g in [sel] + self.col.decks.parents(did):
rem = fn(g)
if lim == -1:
lim = rem
else:
lim = min(rem, lim)
return lim
def _newForDeck(self, did, lim):
"New count for a single deck."
if not lim:
return 0
lim = min(lim, self.reportLimit)
return self.col.db.scalar("""
select count() from
(select 1 from cards where did = ? and queue = 0 limit ?)""", did, lim)
def _deckNewLimitSingle(self, g):
"Limit for deck without parent limits."
if g['dyn']:
return self.reportLimit
c = self.col.decks.confForDid(g['id'])
return max(0, c['new']['perDay'] - g['newToday'][1])
def totalNewForCurrentDeck(self):
return self.col.db.scalar(
"""
select count() from cards where id in (
select id from cards where did in %s and queue = 0 limit ?)"""
% ids2str(self.col.decks.active()), self.reportLimit)
# Learning queues
##########################################################################
def _resetLrnCount(self):
# sub-day
self.lrnCount = self.col.db.scalar("""
select sum(left/1000) from (select left from cards where
did in %s and queue = 1 and due < ? limit %d)""" % (
self._deckLimit(), self.reportLimit),
self.dayCutoff) or 0
# day
self.lrnCount += self.col.db.scalar("""
select count() from cards where did in %s and queue = 3
and due <= ? limit %d""" % (self._deckLimit(), self.reportLimit),
self.today)
def _resetLrn(self):
self._resetLrnCount()
self._lrnQueue = []
self._lrnDayQueue = []
self._lrnDids = self.col.decks.active()[:]
# sub-day learning
def _fillLrn(self):
if not self.lrnCount:
return False
if self._lrnQueue:
return True
self._lrnQueue = self.col.db.all("""
select due, id from cards where
did in %s and queue = 1 and due < :lim
limit %d""" % (self._deckLimit(), self.reportLimit), lim=self.dayCutoff)
# as it arrives sorted by did first, we need to sort it
self._lrnQueue.sort()
return self._lrnQueue
def _getLrnCard(self, collapse=False):
if self._fillLrn():
cutoff = time.time()
if collapse:
cutoff += self.col.conf['collapseTime']
if self._lrnQueue[0][0] < cutoff:
id = heappop(self._lrnQueue)[1]
card = self.col.getCard(id)
self.lrnCount -= card.left // 1000
return card
# daily learning
def _fillLrnDay(self):
if not self.lrnCount:
return False
if self._lrnDayQueue:
return True
while self._lrnDids:
did = self._lrnDids[0]
# fill the queue with the current did
self._lrnDayQueue = self.col.db.list("""
select id from cards where
did = ? and queue = 3 and due <= ? limit ?""",
did, self.today, self.queueLimit)
if self._lrnDayQueue:
# order
r = random.Random()
r.seed(self.today)
r.shuffle(self._lrnDayQueue)
# is the current did empty?
if len(self._lrnDayQueue) < self.queueLimit:
self._lrnDids.pop(0)
return True
# nothing left in the deck; move to next
self._lrnDids.pop(0)
def _getLrnDayCard(self):
if self._fillLrnDay():
self.lrnCount -= 1
return self.col.getCard(self._lrnDayQueue.pop())
def _answerLrnCard(self, card, ease):
# ease 1=no, 2=yes, 3=remove
conf = self._lrnConf(card)
if card.odid and not card.wasNew:
type = 3
elif card.type == 2:
type = 2
else:
type = 0
leaving = False
# lrnCount was decremented once when card was fetched
lastLeft = card.left
# immediate graduate?
if ease == 3:
self._rescheduleAsRev(card, conf, True)
leaving = True
# graduation time?
elif ease == 2 and (card.left%1000)-1 <= 0:
self._rescheduleAsRev(card, conf, False)
leaving = True
else:
# one step towards graduation
if ease == 2:
# decrement real left count and recalculate left today
left = (card.left % 1000) - 1
card.left = self._leftToday(conf['delays'], left)*1000 + left
# failed
else:
card.left = self._startingLeft(card)
resched = self._resched(card)
if 'mult' in conf and resched:
# review that's lapsed
card.ivl = max(1, conf['minInt'], card.ivl*conf['mult'])
else:
# new card; no ivl adjustment
pass
if resched and card.odid:
card.odue = self.today + 1
delay = self._delayForGrade(conf, card.left)
if card.due < time.time():
# not collapsed; add some randomness
delay *= random.uniform(1, 1.25)
card.due = int(time.time() + delay)
# due today?
if card.due < self.dayCutoff:
self.lrnCount += card.left // 1000
# if the queue is not empty and there's nothing else to do, make
# sure we don't put it at the head of the queue and end up showing
# it twice in a row
card.queue = 1
if self._lrnQueue and not self.revCount and not self.newCount:
smallestDue = self._lrnQueue[0][0]
card.due = max(card.due, smallestDue+1)
heappush(self._lrnQueue, (card.due, card.id))
else:
# the card is due in one or more days, so we need to use the
# day learn queue
ahead = ((card.due - self.dayCutoff) // 86400) + 1
card.due = self.today + ahead
card.queue = 3
self._logLrn(card, ease, conf, leaving, type, lastLeft)
def _delayForGrade(self, conf, left):
left = left % 1000
try:
delay = conf['delays'][-left]
except IndexError:
if conf['delays']:
delay = conf['delays'][0]
else:
# user deleted final step; use dummy value
delay = 1
return delay*60
def _lrnConf(self, card):
if card.type == 2:
return self._lapseConf(card)
else:
return self._newConf(card)
def _rescheduleAsRev(self, card, conf, early):
lapse = card.type == 2
if lapse:
if self._resched(card):
card.due = max(self.today+1, card.odue)
else:
card.due = card.odue
card.odue = 0
else:
self._rescheduleNew(card, conf, early)
card.queue = 2
card.type = 2
# if we were dynamic, graduating means moving back to the old deck
resched = self._resched(card)
if card.odid:
card.did = card.odid
card.odue = 0
card.odid = 0
# if rescheduling is off, it needs to be set back to a new card
if not resched and not lapse:
card.queue = card.type = 0
card.due = self.col.nextID("pos")
def _startingLeft(self, card):
if card.type == 2:
conf = self._lapseConf(card)
else:
conf = self._lrnConf(card)
tot = len(conf['delays'])
tod = self._leftToday(conf['delays'], tot)
return tot + tod*1000
def _leftToday(self, delays, left, now=None):
"The number of steps that can be completed by the day cutoff."
if not now:
now = intTime()
delays = delays[-left:]
ok = 0
for i in range(len(delays)):
now += delays[i]*60
if now > self.dayCutoff:
break
ok = i
return ok+1
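    # Worked example (illustrative): with delays=[1, 10] (minutes) and both
    # steps completable before the cutoff, _startingLeft() returns
    # 2 + 2*1000 = 2002; after one "good" answer the low digits drop to 1
    # and the today-count is recomputed, giving 1001.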
def _graduatingIvl(self, card, conf, early, adj=True):
if card.type == 2:
# lapsed card being relearnt
if card.odid:
if conf['resched']:
return self._dynIvlBoost(card)
return card.ivl
if not early:
# graduate
ideal = conf['ints'][0]
else:
# early remove
ideal = conf['ints'][1]
if adj:
return self._adjRevIvl(card, ideal)
else:
return ideal
def _rescheduleNew(self, card, conf, early):
"Reschedule a new card that's graduated for the first time."
card.ivl = self._graduatingIvl(card, conf, early)
card.due = self.today+card.ivl
card.factor = conf['initialFactor']
def _logLrn(self, card, ease, conf, leaving, type, lastLeft):
lastIvl = -(self._delayForGrade(conf, lastLeft))
ivl = card.ivl if leaving else -(self._delayForGrade(conf, card.left))
def log():
self.col.db.execute(
"insert into revlog values (?,?,?,?,?,?,?,?,?)",
int(time.time()*1000), card.id, self.col.usn(), ease,
ivl, lastIvl, card.factor, card.timeTaken(), type)
try:
log()
except:
# duplicate pk; retry in 10ms
time.sleep(0.01)
log()
def removeLrn(self, ids=None):
"Remove cards from the learning queues."
if ids:
extra = " and id in "+ids2str(ids)
else:
# benchmarks indicate it's about 10x faster to search all decks
# with the index than scan the table
extra = " and did in "+ids2str(self.col.decks.allIds())
# review cards in relearning
self.col.db.execute("""
update cards set
due = odue, queue = 2, mod = %d, usn = %d, odue = 0
where queue in (1,3) and type = 2
%s
""" % (intTime(), self.col.usn(), extra))
# new cards in learning
self.forgetCards(self.col.db.list(
"select id from cards where queue in (1,3) %s" % extra))
def _lrnForDeck(self, did):
cnt = self.col.db.scalar(
"""
select sum(left/1000) from
(select left from cards where did = ? and queue = 1 and due < ? limit ?)""",
did, intTime() + self.col.conf['collapseTime'], self.reportLimit) or 0
return cnt + self.col.db.scalar(
"""
select count() from
(select 1 from cards where did = ? and queue = 3
and due <= ? limit ?)""",
did, self.today, self.reportLimit)
# Reviews
##########################################################################
def _deckRevLimit(self, did):
return self._deckNewLimit(did, self._deckRevLimitSingle)
def _deckRevLimitSingle(self, d):
if d['dyn']:
return self.reportLimit
c = self.col.decks.confForDid(d['id'])
return max(0, c['rev']['perDay'] - d['revToday'][1])
def _revForDeck(self, did, lim):
lim = min(lim, self.reportLimit)
return self.col.db.scalar(
"""
select count() from
(select 1 from cards where did = ? and queue = 2
and due <= ? limit ?)""",
did, self.today, lim)
def _resetRevCount(self):
def cntFn(did, lim):
return self.col.db.scalar("""
select count() from (select id from cards where
did = ? and queue = 2 and due <= ? limit %d)""" % lim,
did, self.today)
self.revCount = self._walkingCount(
self._deckRevLimitSingle, cntFn)
def _resetRev(self):
self._resetRevCount()
self._revQueue = []
self._revDids = self.col.decks.active()[:]
def _fillRev(self):
if self._revQueue:
return True
if not self.revCount:
return False
while self._revDids:
did = self._revDids[0]
lim = min(self.queueLimit, self._deckRevLimit(did))
if lim:
# fill the queue with the current did
self._revQueue = self.col.db.list("""
select id from cards where
did = ? and queue = 2 and due <= ? limit ?""",
did, self.today, lim)
if self._revQueue:
# ordering
if self.col.decks.get(did)['dyn']:
# dynamic decks need due order preserved
self._revQueue.reverse()
else:
# random order for regular reviews
r = random.Random()
r.seed(self.today)
r.shuffle(self._revQueue)
# is the current did empty?
if len(self._revQueue) < lim:
self._revDids.pop(0)
return True
# nothing left in the deck; move to next
self._revDids.pop(0)
if self.revCount:
# if we didn't get a card but the count is non-zero,
# we need to check again for any cards that were
# removed from the queue but not buried
self._resetRev()
return self._fillRev()
def _getRevCard(self):
if self._fillRev():
self.revCount -= 1
return self.col.getCard(self._revQueue.pop())
def totalRevForCurrentDeck(self):
return self.col.db.scalar(
"""
select count() from cards where id in (
select id from cards where did in %s and queue = 2 and due <= ? limit ?)"""
% ids2str(self.col.decks.active()), self.today, self.reportLimit)
# Answering a review card
##########################################################################
def _answerRevCard(self, card, ease):
delay = 0
if ease == 1:
delay = self._rescheduleLapse(card)
else:
self._rescheduleRev(card, ease)
self._logRev(card, ease, delay)
def _rescheduleLapse(self, card):
conf = self._lapseConf(card)
card.lastIvl = card.ivl
if self._resched(card):
card.lapses += 1
card.ivl = self._nextLapseIvl(card, conf)
card.factor = max(1300, card.factor-200)
card.due = self.today + card.ivl
# if it's a filtered deck, update odue as well
if card.odid:
card.odue = card.due
# if suspended as a leech, nothing to do
delay = 0
if self._checkLeech(card, conf) and card.queue == -1:
return delay
# if no relearning steps, nothing to do
if not conf['delays']:
return delay
# record rev due date for later
if not card.odue:
card.odue = card.due
delay = self._delayForGrade(conf, 0)
card.due = int(delay + time.time())
card.left = self._startingLeft(card)
# queue 1
if card.due < self.dayCutoff:
self.lrnCount += card.left // 1000
card.queue = 1
heappush(self._lrnQueue, (card.due, card.id))
else:
# day learn queue
ahead = ((card.due - self.dayCutoff) // 86400) + 1
card.due = self.today + ahead
card.queue = 3
return delay
def _nextLapseIvl(self, card, conf):
return max(conf['minInt'], int(card.ivl*conf['mult']))
def _rescheduleRev(self, card, ease):
# update interval
card.lastIvl = card.ivl
if self._resched(card):
self._updateRevIvl(card, ease)
# then the rest
card.factor = max(1300, card.factor+[-150, 0, 150][ease-2])
card.due = self.today + card.ivl
else:
card.due = card.odue
if card.odid:
card.did = card.odid
card.odid = 0
card.odue = 0
def _logRev(self, card, ease, delay):
def log():
self.col.db.execute(
"insert into revlog values (?,?,?,?,?,?,?,?,?)",
int(time.time()*1000), card.id, self.col.usn(), ease,
-delay or card.ivl, card.lastIvl, card.factor, card.timeTaken(),
1)
try:
log()
except:
# duplicate pk; retry in 10ms
time.sleep(0.01)
log()
# Interval management
##########################################################################
def _nextRevIvl(self, card, ease):
"Ideal next interval for CARD, given EASE."
delay = self._daysLate(card)
conf = self._revConf(card)
fct = card.factor / 1000
ivl2 = self._constrainedIvl((card.ivl + delay // 4) * 1.2, conf, card.ivl)
ivl3 = self._constrainedIvl((card.ivl + delay // 2) * fct, conf, ivl2)
ivl4 = self._constrainedIvl(
(card.ivl + delay) * fct * conf['ease4'], conf, ivl3)
if ease == 2:
interval = ivl2
elif ease == 3:
interval = ivl3
elif ease == 4:
interval = ivl4
# interval capped?
return min(interval, conf['maxIvl'])
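    # Worked example (illustrative): ivl=10, factor=2500, answered on time
    # (delay=0), ease4=1.3 and no ivlFct gives ivl2 = max(10*1.2, 11) = 12,
    # ivl3 = max(10*2.5, 13) = 25 and ivl4 = max(10*2.5*1.3, 26) = 32,
    # before the maxIvl cap here and the fuzz applied via _adjRevIvl().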
    def _fuzzedIvl(self, ivl):
        # avoid shadowing the min/max builtins
        lower, upper = self._fuzzIvlRange(ivl)
        return random.randint(lower, upper)
def _fuzzIvlRange(self, ivl):
if ivl < 2:
return [1, 1]
elif ivl == 2:
return [2, 3]
elif ivl < 7:
fuzz = int(ivl*0.25)
elif ivl < 30:
fuzz = max(2, int(ivl*0.15))
else:
fuzz = max(4, int(ivl*0.05))
# fuzz at least a day
fuzz = max(fuzz, 1)
return [ivl-fuzz, ivl+fuzz]
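    # Illustrative: ivl=10 fuzzes to [8, 12] (15% of 10, floor of 2), while
    # ivl=30 fuzzes to [26, 34] (5% of 30 is only 1, so the 4-day floor
    # applies); one- and two-day intervals barely move at all.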
def _constrainedIvl(self, ivl, conf, prev):
"Integer interval after interval factor and prev+1 constraints applied."
new = ivl * conf.get('ivlFct', 1)
return int(max(new, prev+1))
def _daysLate(self, card):
"Number of days later than scheduled."
due = card.odue if card.odid else card.due
return max(0, self.today - due)
def _updateRevIvl(self, card, ease):
idealIvl = self._nextRevIvl(card, ease)
card.ivl = self._adjRevIvl(card, idealIvl)
def _adjRevIvl(self, card, idealIvl):
if self._spreadRev:
idealIvl = self._fuzzedIvl(idealIvl)
return idealIvl
# Dynamic deck handling
##########################################################################
def rebuildDyn(self, did=None):
"Rebuild a dynamic deck."
did = did or self.col.decks.selected()
deck = self.col.decks.get(did)
assert deck['dyn']
# move any existing cards back first, then fill
self.emptyDyn(did)
ids = self._fillDyn(deck)
if not ids:
return
# and change to our new deck
self.col.decks.select(did)
return ids
def _fillDyn(self, deck):
search, limit, order = deck['terms'][0]
orderlimit = self._dynOrder(order, limit)
if search.strip():
search = "(%s)" % search
search = "%s -is:suspended -is:buried -deck:filtered" % search
        try:
            ids = self.col.findCards(search, order=orderlimit)
        except Exception:
            # invalid search; treat the filtered deck as empty
            ids = []
            return ids
# move the cards over
self.col.log(deck['id'], ids)
self._moveToDyn(deck['id'], ids)
return ids
def emptyDyn(self, did, lim=None):
if not lim:
lim = "did = %s" % did
self.col.log(self.col.db.list("select id from cards where %s" % lim))
# move out of cram queue
self.col.db.execute("""
update cards set did = odid, queue = (case when type = 1 then 0
else type end), type = (case when type = 1 then 0 else type end),
due = odue, odue = 0, odid = 0, usn = ? where %s""" % lim,
self.col.usn())
def remFromDyn(self, cids):
self.emptyDyn(None, "id in %s and odid" % ids2str(cids))
def _dynOrder(self, o, l):
if o == DYN_OLDEST:
t = "c.mod"
elif o == DYN_RANDOM:
t = "random()"
elif o == DYN_SMALLINT:
t = "ivl"
elif o == DYN_BIGINT:
t = "ivl desc"
elif o == DYN_LAPSES:
t = "lapses desc"
elif o == DYN_ADDED:
t = "n.id"
elif o == DYN_REVADDED:
t = "n.id desc"
elif o == DYN_DUE:
t = "c.due"
elif o == DYN_DUEPRIORITY:
t = "(case when queue=2 and due <= %d then (ivl / cast(%d-due+0.001 as real)) else 100000+due end)" % (
self.today, self.today)
else:
# if we don't understand the term, default to due order
t = "c.due"
return t + " limit %d" % l
def _moveToDyn(self, did, ids):
deck = self.col.decks.get(did)
data = []
t = intTime(); u = self.col.usn()
for c, id in enumerate(ids):
# start at -100000 so that reviews are all due
data.append((did, -100000+c, u, id))
# due reviews stay in the review queue. careful: can't use
# "odid or did", as sqlite converts to boolean
queue = """
(case when type=2 and (case when odue then odue <= %d else due <= %d end)
then 2 else 0 end)"""
queue %= (self.today, self.today)
self.col.db.executemany("""
update cards set
odid = (case when odid then odid else did end),
odue = (case when odue then odue else due end),
did = ?, queue = %s, due = ?, usn = ? where id = ?""" % queue, data)
def _dynIvlBoost(self, card):
assert card.odid and card.type == 2
assert card.factor
elapsed = card.ivl - (card.odue - self.today)
factor = ((card.factor/1000)+1.2)/2
ivl = int(max(card.ivl, elapsed * factor, 1))
conf = self._revConf(card)
return min(conf['maxIvl'], ivl)
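    # Worked example (illustrative): a card seen 6 days into a 10-day
    # interval (odue - today = 4) with factor 2500 gives elapsed = 6 and a
    # blended factor of (2.5 + 1.2)/2 = 1.85, so the boost is
    # int(max(10, 6*1.85, 1)) = 11 days, still capped by the deck's maxIvl.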
# Leeches
##########################################################################
def _checkLeech(self, card, conf):
"Leech handler. True if card was a leech."
lf = conf['leechFails']
if not lf:
return
# if over threshold or every half threshold reps after that
if (card.lapses >= lf and
(card.lapses-lf) % (max(lf // 2, 1)) == 0):
# add a leech tag
f = card.note()
f.addTag("leech")
f.flush()
# handle
a = conf['leechAction']
if a == 0:
# if it has an old due, remove it from cram/relearning
if card.odue:
card.due = card.odue
if card.odid:
card.did = card.odid
card.odue = card.odid = 0
card.queue = -1
# notify UI
runHook("leech", card)
return True
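    # Illustrative: with leechFails=8 the check fires at 8 lapses and then
    # every max(8//2, 1) = 4 lapses after that (12, 16, ...); leechAction 0
    # suspends the card, any other value just tags it and fires the hook.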
# Tools
##########################################################################
def _cardConf(self, card):
return self.col.decks.confForDid(card.did)
def _newConf(self, card):
conf = self._cardConf(card)
# normal deck
if not card.odid:
return conf['new']
# dynamic deck; override some attributes, use original deck for others
oconf = self.col.decks.confForDid(card.odid)
delays = conf['delays'] or oconf['new']['delays']
return dict(
# original deck
ints=oconf['new']['ints'],
initialFactor=oconf['new']['initialFactor'],
bury=oconf['new'].get("bury", True),
# overrides
delays=delays,
separate=conf['separate'],
order=NEW_CARDS_DUE,
perDay=self.reportLimit
)
def _lapseConf(self, card):
conf = self._cardConf(card)
# normal deck
if not card.odid:
return conf['lapse']
# dynamic deck; override some attributes, use original deck for others
oconf = self.col.decks.confForDid(card.odid)
delays = conf['delays'] or oconf['lapse']['delays']
return dict(
# original deck
minInt=oconf['lapse']['minInt'],
leechFails=oconf['lapse']['leechFails'],
leechAction=oconf['lapse']['leechAction'],
mult=oconf['lapse']['mult'],
# overrides
delays=delays,
resched=conf['resched'],
)
def _revConf(self, card):
conf = self._cardConf(card)
# normal deck
if not card.odid:
return conf['rev']
# dynamic deck
return self.col.decks.confForDid(card.odid)['rev']
def _deckLimit(self):
return ids2str(self.col.decks.active())
def _resched(self, card):
conf = self._cardConf(card)
if not conf['dyn']:
return True
return conf['resched']
# Daily cutoff
##########################################################################
def _updateCutoff(self):
oldToday = self.today
# days since col created
self.today = int((time.time() - self.col.crt) // 86400)
# end of day cutoff
self.dayCutoff = self.col.crt + (self.today+1)*86400
if oldToday != self.today:
self.col.log(self.today, self.dayCutoff)
# update all daily counts, but don't save decks to prevent needless
# conflicts. we'll save on card answer instead
def update(g):
for t in "new", "rev", "lrn", "time":
key = t+"Today"
if g[key][0] != self.today:
g[key] = [self.today, 0]
for deck in self.col.decks.all():
update(deck)
# unbury if the day has rolled over
unburied = self.col.conf.get("lastUnburied", 0)
if unburied < self.today:
self.unburyCards()
def _checkDay(self):
# check if the day has rolled over
if time.time() > self.dayCutoff:
self.reset()
# Deck finished state
##########################################################################
def finishedMsg(self):
return ("<b>"+_(
"Congratulations! You have finished this deck for now.")+
"</b><br><br>" + self._nextDueMsg())
def _nextDueMsg(self):
line = []
# the new line replacements are so we don't break translations
# in a point release
if self.revDue():
line.append(_("""\
Today's review limit has been reached, but there are still cards
waiting to be reviewed. For optimum memory, consider increasing
the daily limit in the options.""").replace("\n", " "))
if self.newDue():
line.append(_("""\
There are more new cards available, but the daily limit has been
reached. You can increase the limit in the options, but please
bear in mind that the more new cards you introduce, the higher
your short-term review workload will become.""").replace("\n", " "))
if self.haveBuried():
if self.haveCustomStudy:
now = " " + _("To see them now, click the Unbury button below.")
else:
now = ""
line.append(_("""\
Some related or buried cards were delayed until a later session.""")+now)
if self.haveCustomStudy and not self.col.decks.current()['dyn']:
line.append(_("""\
To study outside of the normal schedule, click the Custom Study button below."""))
return "<p>".join(line)
def revDue(self):
"True if there are any rev cards due."
return self.col.db.scalar(
("select 1 from cards where did in %s and queue = 2 "
"and due <= ? limit 1") % self._deckLimit(),
self.today)
def newDue(self):
"True if there are any new cards due."
return self.col.db.scalar(
("select 1 from cards where did in %s and queue = 0 "
"limit 1") % self._deckLimit())
def haveBuried(self):
sdids = ids2str(self.col.decks.active())
cnt = self.col.db.scalar(
"select 1 from cards where queue = -2 and did in %s limit 1" % sdids)
        return bool(cnt)
# Next time reports
##########################################################################
def nextIvlStr(self, card, ease, short=False):
"Return the next interval for CARD as a string."
ivl = self.nextIvl(card, ease)
if not ivl:
return _("(end)")
s = fmtTimeSpan(ivl, short=short)
if ivl < self.col.conf['collapseTime']:
s = "<"+s
return s
def nextIvl(self, card, ease):
"Return the next interval for CARD, in seconds."
if card.queue in (0,1,3):
return self._nextLrnIvl(card, ease)
elif ease == 1:
# lapsed
conf = self._lapseConf(card)
if conf['delays']:
return conf['delays'][0]*60
return self._nextLapseIvl(card, conf)*86400
else:
# review
return self._nextRevIvl(card, ease)*86400
# this isn't easily extracted from the learn code
def _nextLrnIvl(self, card, ease):
if card.queue == 0:
card.left = self._startingLeft(card)
conf = self._lrnConf(card)
if ease == 1:
# fail
return self._delayForGrade(conf, len(conf['delays']))
elif ease == 3:
# early removal
if not self._resched(card):
return 0
return self._graduatingIvl(card, conf, True, adj=False) * 86400
else:
left = card.left%1000 - 1
if left <= 0:
# graduate
if not self._resched(card):
return 0
return self._graduatingIvl(card, conf, False, adj=False) * 86400
else:
return self._delayForGrade(conf, left)
# Suspending
##########################################################################
def suspendCards(self, ids):
"Suspend cards."
self.col.log(ids)
self.remFromDyn(ids)
self.removeLrn(ids)
self.col.db.execute(
"update cards set queue=-1,mod=?,usn=? where id in "+
ids2str(ids), intTime(), self.col.usn())
def unsuspendCards(self, ids):
"Unsuspend cards."
self.col.log(ids)
self.col.db.execute(
"update cards set queue=type,mod=?,usn=? "
"where queue = -1 and id in "+ ids2str(ids),
intTime(), self.col.usn())
def buryCards(self, cids):
self.col.log(cids)
self.remFromDyn(cids)
self.removeLrn(cids)
self.col.db.execute("""
update cards set queue=-2,mod=?,usn=? where id in """+ids2str(cids),
intTime(), self.col.usn())
def buryNote(self, nid):
"Bury all cards for note until next session."
cids = self.col.db.list(
"select id from cards where nid = ? and queue >= 0", nid)
self.buryCards(cids)
# Sibling spacing
##########################################################################
def _burySiblings(self, card):
toBury = []
nconf = self._newConf(card)
buryNew = nconf.get("bury", True)
rconf = self._revConf(card)
buryRev = rconf.get("bury", True)
# loop through and remove from queues
for cid,queue in self.col.db.execute("""
select id, queue from cards where nid=? and id!=?
and (queue=0 or (queue=2 and due<=?))""",
card.nid, card.id, self.today):
if queue == 2:
if buryRev:
toBury.append(cid)
# if bury disabled, we still discard to give same-day spacing
try:
self._revQueue.remove(cid)
except ValueError:
pass
else:
# if bury disabled, we still discard to give same-day spacing
if buryNew:
toBury.append(cid)
try:
self._newQueue.remove(cid)
except ValueError:
pass
# then bury
if toBury:
self.col.db.execute(
"update cards set queue=-2,mod=?,usn=? where id in "+ids2str(toBury),
intTime(), self.col.usn())
self.col.log(toBury)
# Resetting
##########################################################################
def forgetCards(self, ids):
"Put cards at the end of the new queue."
self.remFromDyn(ids)
self.col.db.execute(
"update cards set type=0,queue=0,ivl=0,due=0,odue=0,factor=?"
" where id in "+ids2str(ids), 2500)
pmax = self.col.db.scalar(
"select max(due) from cards where type=0") or 0
# takes care of mod + usn
self.sortCards(ids, start=pmax+1)
self.col.log(ids)
def reschedCards(self, ids, imin, imax):
"Put cards in review queue with a new interval in days (min, max)."
d = []
t = self.today
mod = intTime()
for id in ids:
r = random.randint(imin, imax)
d.append(dict(id=id, due=r+t, ivl=max(1, r), mod=mod,
usn=self.col.usn(), fact=2500))
self.remFromDyn(ids)
self.col.db.executemany("""
update cards set type=2,queue=2,ivl=:ivl,due=:due,odue=0,
usn=:usn,mod=:mod,factor=:fact where id=:id""",
d)
self.col.log(ids)
def resetCards(self, ids):
"Completely reset cards for export."
sids = ids2str(ids)
# we want to avoid resetting due number of existing new cards on export
nonNew = self.col.db.list(
"select id from cards where id in %s and (queue != 0 or type != 0)"
% sids)
# reset all cards
self.col.db.execute(
"update cards set reps=0,lapses=0,odid=0,odue=0,queue=0"
" where id in %s" % sids
)
# and forget any non-new cards, changing their due numbers
self.forgetCards(nonNew)
self.col.log(ids)
# Repositioning new cards
##########################################################################
def sortCards(self, cids, start=1, step=1, shuffle=False, shift=False):
scids = ids2str(cids)
now = intTime()
nids = []
nidsSet = set()
for id in cids:
nid = self.col.db.scalar("select nid from cards where id = ?", id)
if nid not in nidsSet:
nids.append(nid)
nidsSet.add(nid)
if not nids:
# no new cards
return
# determine nid ordering
due = {}
if shuffle:
random.shuffle(nids)
for c, nid in enumerate(nids):
due[nid] = start+c*step
high = start+c*step
# shift?
if shift:
low = self.col.db.scalar(
"select min(due) from cards where due >= ? and type = 0 "
"and id not in %s" % scids,
start)
if low is not None:
shiftby = high - low + 1
self.col.db.execute("""
update cards set mod=?, usn=?, due=due+? where id not in %s
and due >= ? and queue = 0""" % scids, now, self.col.usn(), shiftby, low)
# reorder cards
d = []
for id, nid in self.col.db.execute(
"select id, nid from cards where type = 0 and id in "+scids):
d.append(dict(now=now, due=due[nid], usn=self.col.usn(), cid=id))
self.col.db.executemany(
"update cards set due=:due,mod=:now,usn=:usn where id = :cid", d)
def randomizeCards(self, did):
cids = self.col.db.list("select id from cards where did = ?", did)
self.sortCards(cids, shuffle=True)
def orderCards(self, did):
cids = self.col.db.list("select id from cards where did = ? order by id", did)
self.sortCards(cids)
def resortConf(self, conf):
for did in self.col.decks.didsForConf(conf):
if conf['new']['order'] == 0:
self.randomizeCards(did)
else:
self.orderCards(did)
# for post-import
def maybeRandomizeDeck(self, did=None):
if not did:
did = self.col.decks.selected()
conf = self.col.decks.confForDid(did)
# in order due?
if conf['new']['order'] == NEW_CARDS_RANDOM:
self.randomizeCards(did)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/sched.py
|
sched.py
|
import os
import urllib
import sys
import gzip
import random
from cStringIO import StringIO
import httplib2
from anki.db import DB
from anki.utils import ids2str, intTime, json, isWin, isMac, platDesc, checksum
from anki.consts import *
from anki.hooks import runHook
import anki
from anki.lang import ngettext
# syncing vars
HTTP_TIMEOUT = 90
HTTP_PROXY = None
# badly named; means no retries
httplib2.RETRIES = 1
try:
# httplib2 >=0.7.7
_proxy_info_from_environment = httplib2.proxy_info_from_environment
_proxy_info_from_url = httplib2.proxy_info_from_url
except AttributeError:
# httplib2 <0.7.7
_proxy_info_from_environment = httplib2.ProxyInfo.from_environment
_proxy_info_from_url = httplib2.ProxyInfo.from_url
# Httplib2 connection object
######################################################################
def httpCon():
certs = os.path.join(os.path.dirname(__file__), "ankiweb.certs")
if not os.path.exists(certs):
if isWin:
certs = os.path.join(
os.path.dirname(os.path.abspath(sys.argv[0])),
"ankiweb.certs")
elif isMac:
certs = os.path.join(
os.path.dirname(os.path.abspath(sys.argv[0])),
"../Resources/ankiweb.certs")
else:
assert 0, "Your distro has not packaged Anki correctly."
return httplib2.Http(
timeout=HTTP_TIMEOUT, ca_certs=certs,
proxy_info=HTTP_PROXY,
disable_ssl_certificate_validation=not not HTTP_PROXY)
# Proxy handling
######################################################################
def _setupProxy():
global HTTP_PROXY
# set in env?
p = _proxy_info_from_environment()
if not p:
# platform-specific fetch
url = None
if isWin:
r = urllib.getproxies_registry()
if 'https' in r:
url = r['https']
elif 'http' in r:
url = r['http']
elif isMac:
r = urllib.getproxies_macosx_sysconf()
if 'https' in r:
url = r['https']
elif 'http' in r:
url = r['http']
if url:
p = _proxy_info_from_url(url, _proxyMethod(url))
if p:
p.proxy_rdns = True
HTTP_PROXY = p
def _proxyMethod(url):
if url.lower().startswith("https"):
return "https"
else:
return "http"
_setupProxy()
# Incremental syncing
##########################################################################
class Syncer(object):
def __init__(self, col, server=None):
self.col = col
self.server = server
def sync(self):
"Returns 'noChanges', 'fullSync', 'success', etc"
self.syncMsg = ""
self.uname = ""
# if the deck has any pending changes, flush them first and bump mod
# time
self.col.save()
# step 1: login & metadata
runHook("sync", "login")
meta = self.server.meta()
self.col.log("rmeta", meta)
if not meta:
return "badAuth"
# server requested abort?
self.syncMsg = meta['msg']
if not meta['cont']:
return "serverAbort"
else:
# don't abort, but if 'msg' is not blank, gui should show 'msg'
# after sync finishes and wait for confirmation before hiding
pass
rscm = meta['scm']
rts = meta['ts']
self.rmod = meta['mod']
self.maxUsn = meta['usn']
# this is a temporary measure to address the problem of users
# forgetting which email address they've used - it will be removed
# when enough time has passed
self.uname = meta.get("uname", "")
meta = self.meta()
self.col.log("lmeta", meta)
self.lmod = meta['mod']
self.minUsn = meta['usn']
lscm = meta['scm']
lts = meta['ts']
if abs(rts - lts) > 300:
self.col.log("clock off")
return "clockOff"
if self.lmod == self.rmod:
self.col.log("no changes")
return "noChanges"
elif lscm != rscm:
self.col.log("schema diff")
return "fullSync"
self.lnewer = self.lmod > self.rmod
# step 1.5: check collection is valid
if not self.col.basicCheck():
self.col.log("basic check")
return "basicCheckFailed"
# step 2: deletions
runHook("sync", "meta")
lrem = self.removed()
rrem = self.server.start(
minUsn=self.minUsn, lnewer=self.lnewer, graves=lrem)
self.remove(rrem)
# ...and small objects
lchg = self.changes()
rchg = self.server.applyChanges(changes=lchg)
self.mergeChanges(lchg, rchg)
# step 3: stream large tables from server
runHook("sync", "server")
while 1:
runHook("sync", "stream")
chunk = self.server.chunk()
self.col.log("server chunk", chunk)
self.applyChunk(chunk=chunk)
if chunk['done']:
break
# step 4: stream to server
runHook("sync", "client")
while 1:
runHook("sync", "stream")
chunk = self.chunk()
self.col.log("client chunk", chunk)
self.server.applyChunk(chunk=chunk)
if chunk['done']:
break
# step 5: sanity check
runHook("sync", "sanity")
c = self.sanityCheck()
ret = self.server.sanityCheck2(client=c)
if ret['status'] != "ok":
# roll back and force full sync
self.col.rollback()
self.col.modSchema(False)
self.col.save()
return "sanityCheckFailed"
# finalize
runHook("sync", "finalize")
mod = self.server.finish()
self.finish(mod)
return "success"
def meta(self):
return dict(
mod=self.col.mod,
scm=self.col.scm,
usn=self.col._usn,
ts=intTime(),
musn=0,
msg="",
cont=True
)
def changes(self):
"Bundle up small objects."
d = dict(models=self.getModels(),
decks=self.getDecks(),
tags=self.getTags())
if self.lnewer:
d['conf'] = self.getConf()
d['crt'] = self.col.crt
return d
def applyChanges(self, changes):
self.rchg = changes
lchg = self.changes()
# merge our side before returning
self.mergeChanges(lchg, self.rchg)
return lchg
def mergeChanges(self, lchg, rchg):
# then the other objects
self.mergeModels(rchg['models'])
self.mergeDecks(rchg['decks'])
self.mergeTags(rchg['tags'])
if 'conf' in rchg:
self.mergeConf(rchg['conf'])
# this was left out of earlier betas
if 'crt' in rchg:
self.col.crt = rchg['crt']
self.prepareToChunk()
def sanityCheck(self):
if not self.col.basicCheck():
return "failed basic check"
for t in "cards", "notes", "revlog", "graves":
if self.col.db.scalar(
"select count() from %s where usn = -1" % t):
return "%s had usn = -1" % t
for g in self.col.decks.all():
if g['usn'] == -1:
return "deck had usn = -1"
for t, usn in self.col.tags.allItems():
if usn == -1:
return "tag had usn = -1"
found = False
for m in self.col.models.all():
if self.col.server:
# the web upgrade was mistakenly setting usn
if m['usn'] < 0:
m['usn'] = 0
found = True
else:
if m['usn'] == -1:
return "model had usn = -1"
if found:
self.col.models.save()
self.col.sched.reset()
# check for missing parent decks
self.col.sched.deckDueList()
# return summary of deck
return [
list(self.col.sched.counts()),
self.col.db.scalar("select count() from cards"),
self.col.db.scalar("select count() from notes"),
self.col.db.scalar("select count() from revlog"),
self.col.db.scalar("select count() from graves"),
len(self.col.models.all()),
len(self.col.decks.all()),
len(self.col.decks.allConf()),
]
def sanityCheck2(self, client):
server = self.sanityCheck()
if client != server:
return dict(status="bad", c=client, s=server)
return dict(status="ok")
def usnLim(self):
if self.col.server:
return "usn >= %d" % self.minUsn
else:
return "usn = -1"
def finish(self, mod=None):
if not mod:
# server side; we decide new mod time
mod = intTime(1000)
self.col.ls = mod
self.col._usn = self.maxUsn + 1
# ensure we save the mod time even if no changes made
self.col.db.mod = True
self.col.save(mod=mod)
return mod
# Chunked syncing
##########################################################################
def prepareToChunk(self):
self.tablesLeft = ["revlog", "cards", "notes"]
self.cursor = None
def cursorForTable(self, table):
lim = self.usnLim()
x = self.col.db.execute
d = (self.maxUsn, lim)
if table == "revlog":
return x("""
select id, cid, %d, ease, ivl, lastIvl, factor, time, type
from revlog where %s""" % d)
elif table == "cards":
return x("""
select id, nid, did, ord, mod, %d, type, queue, due, ivl, factor, reps,
lapses, left, odue, odid, flags, data from cards where %s""" % d)
else:
return x("""
select id, guid, mid, mod, %d, tags, flds, '', '', flags, data
from notes where %s""" % d)
def chunk(self):
buf = dict(done=False)
lim = 250
while self.tablesLeft and lim:
curTable = self.tablesLeft[0]
if not self.cursor:
self.cursor = self.cursorForTable(curTable)
rows = self.cursor.fetchmany(lim)
fetched = len(rows)
if fetched != lim:
# table is empty
self.tablesLeft.pop(0)
self.cursor = None
# if we're the client, mark the objects as having been sent
if not self.col.server:
self.col.db.execute(
"update %s set usn=? where usn=-1"%curTable,
self.maxUsn)
buf[curTable] = rows
lim -= fetched
if not self.tablesLeft:
buf['done'] = True
return buf
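    # Illustrative: each chunk carries at most 250 rows, drained from
    # revlog, then cards, then notes; a short read (fewer rows than asked
    # for) marks a table as finished, and when all three are exhausted the
    # chunk is flagged done=True so the peer stops requesting more.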
def applyChunk(self, chunk):
if "revlog" in chunk:
self.mergeRevlog(chunk['revlog'])
if "cards" in chunk:
self.mergeCards(chunk['cards'])
if "notes" in chunk:
self.mergeNotes(chunk['notes'])
# Deletions
##########################################################################
def removed(self):
cards = []
notes = []
decks = []
if self.col.server:
curs = self.col.db.execute(
"select oid, type from graves where usn >= ?", self.minUsn)
else:
curs = self.col.db.execute(
"select oid, type from graves where usn = -1")
for oid, type in curs:
if type == REM_CARD:
cards.append(oid)
elif type == REM_NOTE:
notes.append(oid)
else:
decks.append(oid)
if not self.col.server:
self.col.db.execute("update graves set usn=? where usn=-1",
self.maxUsn)
return dict(cards=cards, notes=notes, decks=decks)
def start(self, minUsn, lnewer, graves):
self.maxUsn = self.col._usn
self.minUsn = minUsn
self.lnewer = not lnewer
lgraves = self.removed()
self.remove(graves)
return lgraves
def remove(self, graves):
# pretend to be the server so we don't set usn = -1
wasServer = self.col.server
self.col.server = True
# notes first, so we don't end up with duplicate graves
self.col._remNotes(graves['notes'])
# then cards
self.col.remCards(graves['cards'], notes=False)
# and decks
for oid in graves['decks']:
self.col.decks.rem(oid, childrenToo=False)
self.col.server = wasServer
# Models
##########################################################################
def getModels(self):
if self.col.server:
return [m for m in self.col.models.all() if m['usn'] >= self.minUsn]
else:
mods = [m for m in self.col.models.all() if m['usn'] == -1]
for m in mods:
m['usn'] = self.maxUsn
self.col.models.save()
return mods
def mergeModels(self, rchg):
for r in rchg:
l = self.col.models.get(r['id'])
# if missing locally or server is newer, update
if not l or r['mod'] > l['mod']:
self.col.models.update(r)
# Decks
##########################################################################
def getDecks(self):
if self.col.server:
return [
[g for g in self.col.decks.all() if g['usn'] >= self.minUsn],
[g for g in self.col.decks.allConf() if g['usn'] >= self.minUsn]
]
else:
decks = [g for g in self.col.decks.all() if g['usn'] == -1]
for g in decks:
g['usn'] = self.maxUsn
dconf = [g for g in self.col.decks.allConf() if g['usn'] == -1]
for g in dconf:
g['usn'] = self.maxUsn
self.col.decks.save()
return [decks, dconf]
def mergeDecks(self, rchg):
for r in rchg[0]:
l = self.col.decks.get(r['id'], False)
# if missing locally or server is newer, update
if not l or r['mod'] > l['mod']:
self.col.decks.update(r)
for r in rchg[1]:
try:
l = self.col.decks.getConf(r['id'])
except KeyError:
l = None
# if missing locally or server is newer, update
if not l or r['mod'] > l['mod']:
self.col.decks.updateConf(r)
# Tags
##########################################################################
def getTags(self):
if self.col.server:
return [t for t, usn in self.col.tags.allItems()
if usn >= self.minUsn]
else:
tags = []
for t, usn in self.col.tags.allItems():
if usn == -1:
self.col.tags.tags[t] = self.maxUsn
tags.append(t)
self.col.tags.save()
return tags
def mergeTags(self, tags):
self.col.tags.register(tags, usn=self.maxUsn)
# Cards/notes/revlog
##########################################################################
def mergeRevlog(self, logs):
self.col.db.executemany(
"insert or ignore into revlog values (?,?,?,?,?,?,?,?,?)",
logs)
def newerRows(self, data, table, modIdx):
ids = (r[0] for r in data)
lmods = {}
for id, mod in self.col.db.execute(
"select id, mod from %s where id in %s and %s" % (
table, ids2str(ids), self.usnLim())):
lmods[id] = mod
update = []
for r in data:
if r[0] not in lmods or lmods[r[0]] < r[modIdx]:
update.append(r)
self.col.log(table, data)
return update
def mergeCards(self, cards):
self.col.db.executemany(
"insert or replace into cards values "
"(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
self.newerRows(cards, "cards", 4))
def mergeNotes(self, notes):
rows = self.newerRows(notes, "notes", 3)
self.col.db.executemany(
"insert or replace into notes values (?,?,?,?,?,?,?,?,?,?,?)",
rows)
self.col.updateFieldCache([f[0] for f in rows])
# Col config
##########################################################################
def getConf(self):
return self.col.conf
def mergeConf(self, conf):
self.col.conf = conf
# Local syncing for unit tests
##########################################################################
class LocalServer(Syncer):
# serialize/deserialize payload, so we don't end up sharing objects
# between cols
def applyChanges(self, changes):
l = json.loads; d = json.dumps
return l(d(Syncer.applyChanges(self, l(d(changes)))))
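# Minimal usage sketch, as in the unit tests (col1/col2 are two open
# collections; names assumed for illustration):
#
#   server = LocalServer(col2)
#   client = Syncer(col1, server)
#   ret = client.sync()   # e.g. "success", "noChanges" or "fullSync"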
# HTTP syncing tools
##########################################################################
# Calling code should catch the following codes:
# - 501: client needs upgrade
# - 502: ankiweb down
# - 503/504: server too busy
class HttpSyncer(object):
def __init__(self, hkey=None, con=None):
self.hkey = hkey
self.skey = checksum(str(random.random()))[:8]
self.con = con or httpCon()
self.postVars = {}
def assertOk(self, resp):
if resp['status'] != '200':
raise Exception("Unknown response code: %s" % resp['status'])
# Posting data as a file
######################################################################
# We don't want to post the payload as a form var, as the percent-encoding is
# costly. We could send it as a raw post, but more HTTP clients seem to
# support file uploading, so this is the more compatible choice.
def req(self, method, fobj=None, comp=6, badAuthRaises=False):
BOUNDARY="Anki-sync-boundary"
bdry = "--"+BOUNDARY
buf = StringIO()
# post vars
self.postVars['c'] = 1 if comp else 0
for (key, value) in self.postVars.items():
buf.write(bdry + "\r\n")
buf.write(
'Content-Disposition: form-data; name="%s"\r\n\r\n%s\r\n' %
(key, value))
# payload as raw data or json
if fobj:
# header
buf.write(bdry + "\r\n")
buf.write("""\
Content-Disposition: form-data; name="data"; filename="data"\r\n\
Content-Type: application/octet-stream\r\n\r\n""")
# write file into buffer, optionally compressing
if comp:
tgt = gzip.GzipFile(mode="wb", fileobj=buf, compresslevel=comp)
else:
tgt = buf
while 1:
data = fobj.read(65536)
if not data:
if comp:
tgt.close()
break
tgt.write(data)
buf.write('\r\n' + bdry + '--\r\n')
size = buf.tell()
# connection headers
headers = {
'Content-Type': 'multipart/form-data; boundary=%s' % BOUNDARY,
'Content-Length': str(size),
}
body = buf.getvalue()
buf.close()
resp, cont = self.con.request(
self.syncURL()+method, "POST", headers=headers, body=body)
if not badAuthRaises:
# return false if bad auth instead of raising
if resp['status'] == '403':
return False
self.assertOk(resp)
return cont
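# The body assembled above looks roughly like this on the wire (one part
# per post var, then the optional gzip-compressed "data" file part):
#
#   --Anki-sync-boundary
#   Content-Disposition: form-data; name="c"
#
#   1
#   --Anki-sync-boundary
#   Content-Disposition: form-data; name="data"; filename="data"
#   Content-Type: application/octet-stream
#
#   <payload bytes>
#   --Anki-sync-boundary--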
# Incremental sync over HTTP
######################################################################
class RemoteServer(HttpSyncer):
def __init__(self, hkey):
HttpSyncer.__init__(self, hkey)
def syncURL(self):
if os.getenv("ANKIDEV"):
return "https://l1.ankiweb.net/sync/"
return SYNC_BASE + "sync/"
def hostKey(self, user, pw):
"Returns hkey or none if user/pw incorrect."
self.postVars = dict()
ret = self.req(
"hostKey", StringIO(json.dumps(dict(u=user, p=pw))),
badAuthRaises=False)
if not ret:
# invalid auth
return
self.hkey = json.loads(ret)['key']
return self.hkey
def meta(self):
self.postVars = dict(
k=self.hkey,
s=self.skey,
)
ret = self.req(
"meta", StringIO(json.dumps(dict(
v=SYNC_VER, cv="ankidesktop,%s,%s"%(anki.version, platDesc())))),
badAuthRaises=False)
if not ret:
# invalid auth
return
return json.loads(ret)
def applyChanges(self, **kw):
return self._run("applyChanges", kw)
def start(self, **kw):
return self._run("start", kw)
def chunk(self, **kw):
return self._run("chunk", kw)
def applyChunk(self, **kw):
return self._run("applyChunk", kw)
def sanityCheck2(self, **kw):
return self._run("sanityCheck2", kw)
def finish(self, **kw):
return self._run("finish", kw)
def _run(self, cmd, data):
return json.loads(
self.req(cmd, StringIO(json.dumps(data))))
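# Hedged sketch of the usual client handshake with this class (hkey comes
# from an earlier hostKey() call; error handling omitted):
#
#   server = RemoteServer(hkey)
#   if server.meta() is None:
#       pass  # bad auth; prompt for credentials again
#   client = Syncer(col, server)
#   ret = client.sync()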
# Full syncing
##########################################################################
class FullSyncer(HttpSyncer):
def __init__(self, col, hkey, con):
HttpSyncer.__init__(self, hkey, con)
self.postVars = dict(
k=self.hkey,
v="ankidesktop,%s,%s"%(anki.version, platDesc()),
)
self.col = col
def syncURL(self):
if os.getenv("ANKIDEV"):
return "https://l1.ankiweb.net/sync/"
return SYNC_BASE + "sync/"
def download(self):
runHook("sync", "download")
self.col.close()
cont = self.req("download")
tpath = self.col.path + ".tmp"
if cont == "upgradeRequired":
runHook("sync", "upgradeRequired")
return
open(tpath, "wb").write(cont)
# check the received file is ok
d = DB(tpath)
assert d.scalar("pragma integrity_check") == "ok"
d.close()
# overwrite existing collection
os.unlink(self.col.path)
os.rename(tpath, self.col.path)
self.col = None
def upload(self):
"True if upload successful."
runHook("sync", "upload")
# make sure it's ok before we try to upload
if self.col.db.scalar("pragma integrity_check") != "ok":
return False
if not self.col.basicCheck():
return False
# apply some adjustments, then upload
self.col.beforeUpload()
if self.req("upload", open(self.col.path, "rb")) != "OK":
return False
return True
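# Usage sketch: a full sync replaces one side wholesale (con is an
# httplib2-style connection as returned by httpCon(); names assumed):
#
#   fs = FullSyncer(col, hkey, con)
#   fs.download()   # overwrite the local collection with the server copy
#   # ...or push the local copy; returns False if the DB fails its checks:
#   ok = fs.upload()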
# Media syncing
##########################################################################
#
# About conflicts:
# - to minimize data loss, if both sides are marked for sending and one
# side has been deleted, favour the add
# - if added/changed on both sides, favour the server version on the
# assumption other syncers are in sync with the server
#
class MediaSyncer(object):
def __init__(self, col, server=None):
self.col = col
self.server = server
def sync(self):
# check if there have been any changes
runHook("sync", "findMedia")
self.col.log("findChanges")
self.col.media.findChanges()
# begin session and check if in sync
lastUsn = self.col.media.lastUsn()
ret = self.server.begin()
srvUsn = ret['usn']
if lastUsn == srvUsn and not self.col.media.haveDirty():
return "noChanges"
# loop through and process changes from server
self.col.log("last local usn is %s"%lastUsn)
self.downloadCount = 0
while True:
data = self.server.mediaChanges(lastUsn=lastUsn)
self.col.log("mediaChanges resp count %d"%len(data))
if not data:
break
need = []
lastUsn = data[-1][1]
for fname, rusn, rsum in data:
lsum, ldirty = self.col.media.syncInfo(fname)
self.col.log(
"check: lsum=%s rsum=%s ldirty=%d rusn=%d fname=%s"%(
(lsum and lsum[0:4]),
(rsum and rsum[0:4]),
ldirty,
rusn,
fname))
if rsum:
# added/changed remotely
if not lsum or lsum != rsum:
self.col.log("will fetch")
need.append(fname)
else:
self.col.log("have same already")
ldirty and self.col.media.markClean([fname])
elif lsum:
# deleted remotely
if not ldirty:
self.col.log("delete local")
self.col.media.syncDelete(fname)
else:
# conflict; local add overrides remote delete
self.col.log("conflict; will send")
else:
# deleted both sides
self.col.log("both sides deleted")
ldirty and self.col.media.markClean([fname])
self._downloadFiles(need)
self.col.log("update last usn to %d"%lastUsn)
self.col.media.setLastUsn(lastUsn) # commits
# at this point we're all up to date with the server's changes,
# and we need to send our own
updateConflict = False
toSend = self.col.media.dirtyCount()
while True:
zip, fnames = self.col.media.mediaChangesZip()
if not fnames:
break
runHook("syncMsg", ngettext(
"%d media change to upload", "%d media changes to upload", toSend)
% toSend)
processedCnt, serverLastUsn = self.server.uploadChanges(zip)
self.col.media.markClean(fnames[0:processedCnt])
self.col.log("processed %d, serverUsn %d, clientUsn %d" % (
processedCnt, serverLastUsn, lastUsn
))
if serverLastUsn - processedCnt == lastUsn:
self.col.log("lastUsn in sync, updating local")
lastUsn = serverLastUsn
self.col.media.setLastUsn(serverLastUsn) # commits
else:
self.col.log("concurrent update, skipping usn update")
# commit for markClean
self.col.media.db.commit()
updateConflict = True
toSend -= processedCnt
if updateConflict:
self.col.log("restart sync due to concurrent update")
return self.sync()
lcnt = self.col.media.mediaCount()
ret = self.server.mediaSanity(local=lcnt)
if ret == "OK":
return "OK"
else:
self.col.media.forceResync()
return ret
def _downloadFiles(self, fnames):
self.col.log("%d files to fetch"%len(fnames))
while fnames:
top = fnames[0:SYNC_ZIP_COUNT]
self.col.log("fetch %s"%top)
zipData = self.server.downloadFiles(files=top)
cnt = self.col.media.addFilesFromZip(zipData)
self.downloadCount += cnt
self.col.log("received %d files"%cnt)
fnames = fnames[cnt:]
n = self.downloadCount
runHook("syncMsg", ngettext(
"%d media file downloaded", "%d media files downloaded", n)
% n)
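# Usage sketch (server is a RemoteMediaServer as defined below):
#
#   msync = MediaSyncer(col, server)
#   ret = msync.sync()   # "noChanges", "OK", or a mediaSanity failure code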
# Remote media syncing
##########################################################################
class RemoteMediaServer(HttpSyncer):
def __init__(self, col, hkey, con):
self.col = col
HttpSyncer.__init__(self, hkey, con)
def syncURL(self):
if os.getenv("ANKIDEV"):
return "https://l1.ankiweb.net/msync/"
return SYNC_MEDIA_BASE
def begin(self):
self.postVars = dict(
k=self.hkey,
v="ankidesktop,%s,%s"%(anki.version, platDesc())
)
ret = self._dataOnly(json.loads(self.req(
"begin", StringIO(json.dumps(dict())))))
self.skey = ret['sk']
return ret
# args: lastUsn
def mediaChanges(self, **kw):
self.postVars = dict(
sk=self.skey,
)
resp = json.loads(
self.req("mediaChanges", StringIO(json.dumps(kw))))
return self._dataOnly(resp)
# args: files
def downloadFiles(self, **kw):
return self.req("downloadFiles", StringIO(json.dumps(kw)))
def uploadChanges(self, zip):
# no compression, as we compress the zip file instead
return self._dataOnly(json.loads(
self.req("uploadChanges", StringIO(zip), comp=0)))
# args: local
def mediaSanity(self, **kw):
return self._dataOnly(json.loads(
self.req("mediaSanity", StringIO(json.dumps(kw)))))
def _dataOnly(self, resp):
if resp['err']:
self.col.log("error returned:%s"%resp['err'])
raise Exception("SyncError:%s"%resp['err'])
return resp['data']
# only for unit tests
def mediatest(self, cmd):
self.postVars = dict(
k=self.hkey,
)
return self._dataOnly(json.loads(
self.req("newMediaTest", StringIO(
json.dumps(dict(cmd=cmd))))))
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/sync.py
|
sync.py
|
from anki.utils import fieldChecksum, intTime, \
joinFields, splitFields, stripHTMLMedia, timestampID, guid64
class Note(object):
def __init__(self, col, model=None, id=None):
assert not (model and id)
self.col = col
if id:
self.id = id
self.load()
else:
self.id = timestampID(col.db, "notes")
self.guid = guid64()
self._model = model
self.mid = model['id']
self.tags = []
self.fields = [""] * len(self._model['flds'])
self.flags = 0
self.data = ""
self._fmap = self.col.models.fieldMap(self._model)
self.scm = self.col.scm
def load(self):
(self.guid,
self.mid,
self.mod,
self.usn,
self.tags,
self.fields,
self.flags,
self.data) = self.col.db.first("""
select guid, mid, mod, usn, tags, flds, flags, data
from notes where id = ?""", self.id)
self.fields = splitFields(self.fields)
self.tags = self.col.tags.split(self.tags)
self._model = self.col.models.get(self.mid)
self._fmap = self.col.models.fieldMap(self._model)
self.scm = self.col.scm
def flush(self, mod=None):
"If fields or tags have changed, write changes to disk."
assert self.scm == self.col.scm
self._preFlush()
sfld = stripHTMLMedia(self.fields[self.col.models.sortIdx(self._model)])
tags = self.stringTags()
fields = self.joinedFields()
if not mod and self.col.db.scalar(
"select 1 from notes where id = ? and tags = ? and flds = ?",
self.id, tags, fields):
return
csum = fieldChecksum(self.fields[0])
self.mod = mod if mod else intTime()
self.usn = self.col.usn()
res = self.col.db.execute("""
insert or replace into notes values (?,?,?,?,?,?,?,?,?,?,?)""",
self.id, self.guid, self.mid,
self.mod, self.usn, tags,
fields, sfld, csum, self.flags,
self.data)
self.col.tags.register(self.tags)
self._postFlush()
def joinedFields(self):
return joinFields(self.fields)
def cards(self):
return [self.col.getCard(id) for id in self.col.db.list(
"select id from cards where nid = ? order by ord", self.id)]
def model(self):
return self._model
# Dict interface
##################################################
def keys(self):
return self._fmap.keys()
def values(self):
return self.fields
def items(self):
return [(f['name'], self.fields[ord])
for ord, f in sorted(self._fmap.values())]
def _fieldOrd(self, key):
try:
return self._fmap[key][0]
except:
raise KeyError(key)
def __getitem__(self, key):
return self.fields[self._fieldOrd(key)]
def __setitem__(self, key, value):
self.fields[self._fieldOrd(key)] = value
def __contains__(self, key):
return key in self._fmap.keys()
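# Example of the dict-style access this interface provides; field names
# come from the note's model (here assuming the stock "Basic" model is
# current):
#
#   note = col.newNote()
#   note[u'Front'] = u"question"
#   note[u'Back'] = u"answer"
#   u'Front' in note    # True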
# Tags
##################################################
def hasTag(self, tag):
return self.col.tags.inList(tag, self.tags)
def stringTags(self):
return self.col.tags.join(self.col.tags.canonify(self.tags))
def setTagsFromStr(self, str):
self.tags = self.col.tags.split(str)
def delTag(self, tag):
rem = []
for t in self.tags:
if t.lower() == tag.lower():
rem.append(t)
for r in rem:
self.tags.remove(r)
def addTag(self, tag):
# duplicates will be stripped on save
self.tags.append(tag)
# Unique/duplicate check
##################################################
def dupeOrEmpty(self):
"1 if first is empty; 2 if first is a duplicate, False otherwise."
val = self.fields[0]
if not val.strip():
return 1
csum = fieldChecksum(val)
# find any matching csums and compare
for flds in self.col.db.list(
"select flds from notes where csum = ? and id != ? and mid = ?",
csum, self.id or 0, self.mid):
if stripHTMLMedia(
splitFields(flds)[0]) == stripHTMLMedia(self.fields[0]):
return 2
return False
# Flushing cloze notes
##################################################
def _preFlush(self):
# have we been added yet?
self.newlyAdded = not self.col.db.scalar(
"select 1 from cards where nid = ?", self.id)
def _postFlush(self):
# generate missing cards
if not self.newlyAdded:
rem = self.col.genCards([self.id])
# popping up a dialog while editing is confusing; instead we can
# document that the user should open the templates window to
# garbage collect empty cards
#self.col.remEmptyCards(ids)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/notes.py
|
notes.py
|
from anki.lang import _
from anki.consts import MODEL_CLOZE
models = []
# Basic
##########################################################################
def addBasicModel(col):
mm = col.models
m = mm.new(_("Basic"))
fm = mm.newField(_("Front"))
mm.addField(m, fm)
fm = mm.newField(_("Back"))
mm.addField(m, fm)
t = mm.newTemplate(_("Card 1"))
t['qfmt'] = "{{"+_("Front")+"}}"
t['afmt'] = "{{FrontSide}}\n\n<hr id=answer>\n\n"+"{{"+_("Back")+"}}"
mm.addTemplate(m, t)
mm.add(m)
return m
models.append((lambda: _("Basic"), addBasicModel))
# Forward & Reverse
##########################################################################
def addForwardReverse(col):
mm = col.models
m = addBasicModel(col)
m['name'] = _("Basic (and reversed card)")
t = mm.newTemplate(_("Card 2"))
t['qfmt'] = "{{"+_("Back")+"}}"
t['afmt'] = "{{FrontSide}}\n\n<hr id=answer>\n\n"+"{{"+_("Front")+"}}"
mm.addTemplate(m, t)
return m
models.append((lambda: _("Basic (and reversed card)"), addForwardReverse))
# Forward & Optional Reverse
##########################################################################
def addForwardOptionalReverse(col):
mm = col.models
m = addBasicModel(col)
m['name'] = _("Basic (optional reversed card)")
av = _("Add Reverse")
fm = mm.newField(av)
mm.addField(m, fm)
t = mm.newTemplate(_("Card 2"))
t['qfmt'] = "{{#%s}}{{%s}}{{/%s}}" % (av, _("Back"), av)
t['afmt'] = "{{FrontSide}}\n\n<hr id=answer>\n\n"+"{{"+_("Front")+"}}"
mm.addTemplate(m, t)
return m
models.append((lambda: _("Basic (optional reversed card)"),
addForwardOptionalReverse))
# Cloze
##########################################################################
def addClozeModel(col):
mm = col.models
m = mm.new(_("Cloze"))
m['type'] = MODEL_CLOZE
txt = _("Text")
fm = mm.newField(txt)
mm.addField(m, fm)
fm = mm.newField(_("Extra"))
mm.addField(m, fm)
t = mm.newTemplate(_("Cloze"))
fmt = "{{cloze:%s}}" % txt
m['css'] += """
.cloze {
font-weight: bold;
color: blue;
}"""
t['qfmt'] = fmt
t['afmt'] = fmt + "<br>\n{{%s}}" % _("Extra")
mm.addTemplate(m, t)
mm.add(m)
return m
models.append((lambda: _("Cloze"), addClozeModel))
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/stdmodels.py
|
stdmodels.py
|
from __future__ import division
import re
import os
import random
import time
import math
import htmlentitydefs
import subprocess
import tempfile
import shutil
import string
import sys
import locale
from hashlib import sha1
import platform
import traceback
from anki.lang import _, ngettext
if sys.version_info[1] < 5:
def format_string(a, b):
return a % b
locale.format_string = format_string
try:
import simplejson as json
# make sure simplejson's loads() always returns unicode
# we don't try to support .load()
origLoads = json.loads
def loads(s, *args, **kwargs):
if not isinstance(s, unicode):
s = unicode(s, "utf8")
return origLoads(s, *args, **kwargs)
json.loads = loads
except ImportError:
import json
# Time handling
##############################################################################
def intTime(scale=1):
"The time in integer seconds. Pass scale=1000 to get milliseconds."
return int(time.time()*scale)
timeTable = {
"years": lambda n: ngettext("%s year", "%s years", n),
"months": lambda n: ngettext("%s month", "%s months", n),
"days": lambda n: ngettext("%s day", "%s days", n),
"hours": lambda n: ngettext("%s hour", "%s hours", n),
"minutes": lambda n: ngettext("%s minute", "%s minutes", n),
"seconds": lambda n: ngettext("%s second", "%s seconds", n),
}
afterTimeTable = {
"years": lambda n: ngettext("%s year<!--after-->", "%s years<!--after-->", n),
"months": lambda n: ngettext("%s month<!--after-->", "%s months<!--after-->", n),
"days": lambda n: ngettext("%s day<!--after-->", "%s days<!--after-->", n),
"hours": lambda n: ngettext("%s hour<!--after-->", "%s hours<!--after-->", n),
"minutes": lambda n: ngettext("%s minute<!--after-->", "%s minutes<!--after-->", n),
"seconds": lambda n: ngettext("%s second<!--after-->", "%s seconds<!--after-->", n),
}
def shortTimeFmt(type):
return {
"years": _("%sy"),
"months": _("%smo"),
"days": _("%sd"),
"hours": _("%sh"),
"minutes": _("%sm"),
"seconds": _("%ss"),
}[type]
def fmtTimeSpan(time, pad=0, point=0, short=False, after=False, unit=99):
"Return a string representing a time span (eg '2 days')."
(type, point) = optimalPeriod(time, point, unit)
time = convertSecondsTo(time, type)
if not point:
time = int(round(time))
if short:
fmt = shortTimeFmt(type)
else:
if after:
fmt = afterTimeTable[type](_pluralCount(time, point))
else:
fmt = timeTable[type](_pluralCount(time, point))
timestr = "%(a)d.%(b)df" % {'a': pad, 'b': point}
return locale.format_string("%" + (fmt % timestr), time)
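# Worked examples for the formatter above (English locale):
#   fmtTimeSpan(2 * 86400)              -> "2 days"
#   fmtTimeSpan(90, point=1)            -> "1.5 minutes"
#   fmtTimeSpan(2 * 86400, short=True)  -> "2d"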
def optimalPeriod(time, point, unit):
if abs(time) < 60 or unit < 1:
type = "seconds"
point -= 1
elif abs(time) < 3600 or unit < 2:
type = "minutes"
elif abs(time) < 60 * 60 * 24 or unit < 3:
type = "hours"
elif abs(time) < 60 * 60 * 24 * 30 or unit < 4:
type = "days"
elif abs(time) < 60 * 60 * 24 * 365 or unit < 5:
type = "months"
point += 1
else:
type = "years"
point += 1
return (type, max(point, 0))
def convertSecondsTo(seconds, type):
if type == "seconds":
return seconds
elif type == "minutes":
return seconds / 60
elif type == "hours":
return seconds / 3600
elif type == "days":
return seconds / 86400
elif type == "months":
return seconds / 2592000
elif type == "years":
return seconds / 31536000
assert False
def _pluralCount(time, point):
if point:
return 2
return math.floor(time)
# Locale
##############################################################################
def fmtPercentage(float_value, point=1):
"Return float with percentage sign"
fmt = '%' + "0.%(b)df" % {'b': point}
return locale.format_string(fmt, float_value) + "%"
def fmtFloat(float_value, point=1):
"Return a string with decimal separator according to current locale"
fmt = '%' + "0.%(b)df" % {'b': point}
return locale.format_string(fmt, float_value)
# HTML
##############################################################################
reStyle = re.compile("(?s)<style.*?>.*?</style>")
reScript = re.compile("(?s)<script.*?>.*?</script>")
reTag = re.compile("<.*?>")
reEnts = re.compile("&#?\w+;")
reMedia = re.compile("<img[^>]+src=[\"']?([^\"'>]+)[\"']?[^>]*>")
def stripHTML(s):
s = reStyle.sub("", s)
s = reScript.sub("", s)
s = reTag.sub("", s)
s = entsToTxt(s)
return s
def stripHTMLMedia(s):
"Strip HTML but keep media filenames"
s = reMedia.sub(" \\1 ", s)
return stripHTML(s)
def minimizeHTML(s):
"Correct Qt's verbose bold/underline/etc."
s = re.sub('<span style="font-weight:600;">(.*?)</span>', '<b>\\1</b>',
s)
s = re.sub('<span style="font-style:italic;">(.*?)</span>', '<i>\\1</i>',
s)
s = re.sub('<span style="text-decoration: underline;">(.*?)</span>',
'<u>\\1</u>', s)
return s
def entsToTxt(html):
# entitydefs defines nbsp as \xa0 instead of a standard space, so we
# replace it first
html = html.replace(" ", " ")
def fixup(m):
text = m.group(0)
if text[:2] == "&#":
# character reference
try:
if text[:3] == "&#x":
return unichr(int(text[3:-1], 16))
else:
return unichr(int(text[2:-1]))
except ValueError:
pass
else:
# named entity
try:
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
except KeyError:
pass
return text # leave as is
return reEnts.sub(fixup, html)
# IDs
##############################################################################
def hexifyID(id):
return "%x" % int(id)
def dehexifyID(id):
return int(id, 16)
def ids2str(ids):
"""Given a list of integers, return a string '(int1,int2,...)'."""
return "(%s)" % ",".join(str(i) for i in ids)
def timestampID(db, table):
"Return a non-conflicting timestamp for table."
# be careful not to create multiple objects without flushing them, or they
# may share an ID.
t = intTime(1000)
while db.scalar("select id from %s where id = ?" % table, t):
t += 1
return t
def maxID(db):
"Return the first safe ID to use."
now = intTime(1000)
for tbl in "cards", "notes":
now = max(now, db.scalar(
"select max(id) from %s" % tbl))
return now + 1
# used in ankiweb
def base62(num, extra=""):
s = string; table = s.ascii_letters + s.digits + extra
buf = ""
while num:
num, i = divmod(num, len(table))
buf = table[i] + buf
return buf
_base91_extra_chars = "!#$%&()*+,-./:;<=>?@[]^_`{|}~"
def base91(num):
# all printable characters minus quotes, backslash and separators
return base62(num, _base91_extra_chars)
def guid64():
"Return a base91-encoded 64bit random number."
return base91(random.randint(0, 2**64-1))
# increment a guid by one, for note type conflicts
def incGuid(guid):
return _incGuid(guid[::-1])[::-1]
def _incGuid(guid):
s = string; table = s.ascii_letters + s.digits + _base91_extra_chars
idx = table.index(guid[0])
if idx + 1 == len(table):
# overflow
guid = table[0] + _incGuid(guid[1:])
else:
guid = table[idx+1] + guid[1:]
return guid
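# Example: the increment walks the base91 table one character at a time,
# carrying on overflow, so incGuid(u"aa") -> u"ab".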
# Fields
##############################################################################
def joinFields(list):
return "\x1f".join(list)
def splitFields(string):
return string.split("\x1f")
# Checksums
##############################################################################
def checksum(data):
if isinstance(data, unicode):
data = data.encode("utf-8")
return sha1(data).hexdigest()
def fieldChecksum(data):
# 32 bit unsigned number from first 8 digits of sha1 hash
return int(checksum(stripHTMLMedia(data).encode("utf-8"))[:8], 16)
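# Example: markup is stripped before hashing, so these are equal:
#   fieldChecksum(u"<b>cat</b>") == fieldChecksum(u"cat")   # True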
# Temp files
##############################################################################
_tmpdir = None
def tmpdir():
"A reusable temp folder which we clean out on each program invocation."
global _tmpdir
if not _tmpdir:
def cleanup():
shutil.rmtree(_tmpdir)
import atexit
atexit.register(cleanup)
_tmpdir = unicode(os.path.join(tempfile.gettempdir(), "anki_temp"), \
sys.getfilesystemencoding())
if not os.path.exists(_tmpdir):
os.mkdir(_tmpdir)
return _tmpdir
def tmpfile(prefix="", suffix=""):
(fd, name) = tempfile.mkstemp(dir=tmpdir(), prefix=prefix, suffix=suffix)
os.close(fd)
return name
def namedtmp(name, rm=True):
"Return tmpdir+name. Deletes any existing file."
path = os.path.join(tmpdir(), name)
if rm:
try:
os.unlink(path)
except (OSError, IOError):
pass
return path
# Cmd invocation
##############################################################################
def call(argv, wait=True, **kwargs):
"Execute a command. If WAIT, return exit code."
# ensure we don't open a separate window for the forked process on Windows
if isWin:
si = subprocess.STARTUPINFO()
try:
si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
except:
si.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW
else:
si = None
# run
try:
o = subprocess.Popen(argv, startupinfo=si, **kwargs)
except OSError:
# command not found
return -1
# wait for command to finish
if wait:
while 1:
try:
ret = o.wait()
except OSError:
# interrupted system call
continue
break
else:
ret = 0
return ret
# OS helpers
##############################################################################
isMac = sys.platform.startswith("darwin")
isWin = sys.platform.startswith("win32")
invalidFilenameChars = ":*?\"<>|"
def invalidFilename(str, dirsep=True):
for c in invalidFilenameChars:
if c in str:
return c
if (dirsep or isWin) and "/" in str:
return "/"
elif (dirsep or not isWin) and "\\" in str:
return "\\"
elif str.strip().startswith("."):
return "."
def platDesc():
# we may get an interrupted system call, so try this in a loop
n = 0
theos = "unknown"
while n < 100:
n += 1
try:
system = platform.system()
if isMac:
theos = "mac:%s" % (platform.mac_ver()[0])
elif isWin:
theos = "win:%s" % (platform.win32_ver()[0])
elif system == "Linux":
dist = platform.dist()
theos = "lin:%s:%s" % (dist[0], dist[1])
else:
theos = system
break
except:
continue
return theos
# Debugging
##############################################################################
class TimedLog(object):
def __init__(self):
self._last = time.time()
def log(self, s):
path, num, fn, y = traceback.extract_stack(limit=2)[0]
sys.stderr.write("%5dms: %s(): %s\n" % ((time.time() - self._last)*1000, fn, s))
self._last = time.time()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/utils.py
|
utils.py
|
import os, sys, re
import gettext
import threading
langs = [
(u"Afrikaans", "af"),
(u"Bahasa Melayu", "ms"),
(u"Dansk", "da"),
(u"Deutsch", "de"),
(u"Eesti", "et"),
(u"English", "en"),
(u"Español", "es"),
(u"Esperanto", "eo"),
(u"Français", "fr"),
(u"Galego", "gl"),
(u"Italiano", "it"),
(u"Lenga d'òc", "oc"),
(u"Magyar", "hu"),
(u"Nederlands","nl"),
(u"Norsk","nb"),
(u"Occitan","oc"),
(u"Plattdüütsch", "nds"),
(u"Polski", "pl"),
(u"Português Brasileiro", "pt_BR"),
(u"Português", "pt"),
(u"Româneşte", "ro"),
(u"Slovenščina", "sl"),
(u"Suomi", "fi"),
(u"Svenska", "sv"),
(u"Tiếng Việt", "vi"),
(u"Türkçe", "tr"),
(u"Čeština", "cs"),
(u"Ελληνικά", "el"),
(u"босански", "bs"),
(u"Български", "bg"),
(u"Монгол хэл","mn"),
(u"русский язык", "ru"),
(u"Српски", "sr"),
(u"українська мова", "uk"),
(u"עִבְרִית", "he"),
(u"العربية", "ar"),
(u"فارسی", "fa"),
(u"ภาษาไทย", "th"),
(u"日本語", "ja"),
(u"简体中文", "zh_CN"),
(u"繁體中文", "zh_TW"),
(u"한국어", "ko"),
]
threadLocal = threading.local()
# global defaults
currentLang = None
currentTranslation = None
def localTranslation():
"Return the translation local to this thread, or the default."
if getattr(threadLocal, 'currentTranslation', None):
return threadLocal.currentTranslation
else:
return currentTranslation
def _(str):
return localTranslation().ugettext(str)
def ngettext(single, plural, n):
return localTranslation().ungettext(single, plural, n)
def langDir():
dir = os.path.join(os.path.dirname(
os.path.abspath(__file__)), "locale")
if not os.path.isdir(dir):
dir = os.path.join(os.path.dirname(sys.argv[0]), "locale")
if not os.path.isdir(dir):
dir = "/usr/share/anki/locale"
return dir
def setLang(lang, local=True):
trans = gettext.translation(
'anki', langDir(), languages=[lang], fallback=True)
if local:
threadLocal.currentLang = lang
threadLocal.currentTranslation = trans
else:
global currentLang, currentTranslation
currentLang = lang
currentTranslation = trans
def getLang():
"Return the language local to this thread, or the default."
if getattr(threadLocal, 'currentLang', None):
return threadLocal.currentLang
else:
return currentLang
def noHint(str):
"Remove translation hint from end of string."
return re.sub("(^.*?)( ?\(.+?\))?$", "\\1", str)
if not currentTranslation:
setLang("en_US", local=False)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/lang.py
|
lang.py
|
import os
import time
try:
from pysqlite2 import dbapi2 as sqlite
vi = sqlite.version_info
if vi[0] > 2 or vi[1] > 6:
# latest pysqlite breaks anki
raise ImportError()
except ImportError:
from sqlite3 import dbapi2 as sqlite
Error = sqlite.Error
class DB(object):
def __init__(self, path, text=None, timeout=0):
encpath = path
if isinstance(encpath, unicode):
encpath = path.encode("utf-8")
self._db = sqlite.connect(encpath, timeout=timeout)
if text:
self._db.text_factory = text
self._path = path
self.echo = os.environ.get("DBECHO")
self.mod = False
def execute(self, sql, *a, **ka):
s = sql.strip().lower()
# mark modified?
for stmt in "insert", "update", "delete":
if s.startswith(stmt):
self.mod = True
t = time.time()
if ka:
# execute("...where id = :id", id=5)
res = self._db.execute(sql, ka)
else:
# execute("...where id = ?", 5)
res = self._db.execute(sql, a)
if self.echo:
#print a, ka
print sql, "%0.3fms" % ((time.time() - t)*1000)
if self.echo == "2":
print a, ka
return res
def executemany(self, sql, l):
self.mod = True
t = time.time()
self._db.executemany(sql, l)
if self.echo:
print sql, "%0.3fms" % ((time.time() - t)*1000)
if self.echo == "2":
print l
def commit(self):
t = time.time()
self._db.commit()
if self.echo:
print "commit %0.3fms" % ((time.time() - t)*1000)
def executescript(self, sql):
self.mod = True
if self.echo:
print sql
self._db.executescript(sql)
def rollback(self):
self._db.rollback()
def scalar(self, *a, **kw):
res = self.execute(*a, **kw).fetchone()
if res:
return res[0]
return None
def all(self, *a, **kw):
return self.execute(*a, **kw).fetchall()
def first(self, *a, **kw):
c = self.execute(*a, **kw)
res = c.fetchone()
c.close()
return res
def list(self, *a, **kw):
return [x[0] for x in self.execute(*a, **kw)]
def close(self):
self._db.close()
def set_progress_handler(self, *args):
self._db.set_progress_handler(*args)
def __enter__(self):
self._db.execute("begin")
return self
def __exit__(self, exc_type, *args):
self._db.close()
def totalChanges(self):
return self._db.total_changes
def interrupt(self):
self._db.interrupt()
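# Usage examples for the wrapper above (path assumed; execute() accepts
# both positional and named parameter styles):
#
#   db = DB(u"/path/to/collection.anki2")
#   db.scalar("select count() from cards")
#   db.list("select id from notes where mid = ?", 12345)
#   db.all("select id, mod from cards where nid = :nid", nid=67890)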
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/db.py
|
db.py
|
import re
import sre_constants
from anki.utils import ids2str, splitFields, joinFields, intTime, fieldChecksum, stripHTMLMedia
from anki.consts import *
from anki.hooks import *
# Find
##########################################################################
class Finder(object):
def __init__(self, col):
self.col = col
self.search = dict(
added=self._findAdded,
card=self._findTemplate,
deck=self._findDeck,
mid=self._findMid,
nid=self._findNids,
cid=self._findCids,
note=self._findModel,
prop=self._findProp,
rated=self._findRated,
tag=self._findTag,
dupe=self._findDupes,
)
self.search['is'] = self._findCardState
runHook("search", self.search)
def findCards(self, query, order=False):
"Return a list of card ids for QUERY."
tokens = self._tokenize(query)
preds, args = self._where(tokens)
if preds is None:
return []
order, rev = self._order(order)
sql = self._query(preds, order)
try:
res = self.col.db.list(sql, *args)
except:
# invalid grouping
return []
if rev:
res.reverse()
return res
def findNotes(self, query):
tokens = self._tokenize(query)
preds, args = self._where(tokens)
if preds is None:
return []
if preds:
preds = "(" + preds + ")"
else:
preds = "1"
sql = """
select distinct(n.id) from cards c, notes n where c.nid=n.id and """+preds
try:
res = self.col.db.list(sql, *args)
except:
# invalid grouping
return []
return res
# Tokenizing
######################################################################
def _tokenize(self, query):
inQuote = False
tokens = []
token = ""
for c in query:
# quoted text
if c in ("'", '"'):
if inQuote:
if c == inQuote:
inQuote = False
else:
token += c
elif token:
# quotes are allowed to start directly after a :
if token[-1] == ":":
inQuote = c
else:
token += c
else:
inQuote = c
# separator (space and ideographic space)
elif c in (" ", u'\u3000'):
if inQuote:
token += c
elif token:
# space marks token finished
tokens.append(token)
token = ""
# nesting
elif c in ("(", ")"):
if inQuote:
token += c
else:
if c == ")" and token:
tokens.append(token)
token = ""
tokens.append(c)
# negation
elif c == "-":
if token:
token += c
elif not tokens or tokens[-1] != "-":
tokens.append("-")
# normal character
else:
token += c
# if we finished in a token, add it
if token:
tokens.append(token)
return tokens
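# Worked example of the tokenizer above:
#   _tokenize(u'tag:foo -deck:"my deck" (a or b)')
#   -> [u'tag:foo', u'-', u'deck:my deck', u'(', u'a', u'or', u'b', u')']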
# Query building
######################################################################
def _where(self, tokens):
# state and query
s = dict(isnot=False, isor=False, join=False, q="", bad=False)
args = []
def add(txt, wrap=True):
# failed command?
if not txt:
# if it was to be negated then we can just ignore it
if s['isnot']:
s['isnot'] = False
return
else:
s['bad'] = True
return
elif txt == "skip":
return
# do we need a conjunction?
if s['join']:
if s['isor']:
s['q'] += " or "
s['isor'] = False
else:
s['q'] += " and "
if s['isnot']:
s['q'] += " not "
s['isnot'] = False
if wrap:
txt = "(" + txt + ")"
s['q'] += txt
s['join'] = True
for token in tokens:
if s['bad']:
return None, None
# special tokens
if token == "-":
s['isnot'] = True
elif token.lower() == "or":
s['isor'] = True
elif token == "(":
add(token, wrap=False)
s['join'] = False
elif token == ")":
s['q'] += ")"
# commands
elif ":" in token:
cmd, val = token.split(":", 1)
cmd = cmd.lower()
if cmd in self.search:
add(self.search[cmd]((val, args)))
else:
add(self._findField(cmd, val))
# normal text search
else:
add(self._findText(token, args))
if s['bad']:
return None, None
return s['q'], args
def _query(self, preds, order):
# can we skip the note table?
if "n." not in preds and "n." not in order:
sql = "select c.id from cards c where "
else:
sql = "select c.id from cards c, notes n where c.nid=n.id and "
# combine with preds
if preds:
sql += "(" + preds + ")"
else:
sql += "1"
# order
if order:
sql += " " + order
return sql
# Ordering
######################################################################
def _order(self, order):
if not order:
return "", False
elif order is not True:
# custom order string provided
return " order by " + order, False
# use deck default
type = self.col.conf['sortType']
sort = None
if type.startswith("note"):
if type == "noteCrt":
sort = "n.id, c.ord"
elif type == "noteMod":
sort = "n.mod, c.ord"
elif type == "noteFld":
sort = "n.sfld collate nocase, c.ord"
elif type.startswith("card"):
if type == "cardMod":
sort = "c.mod"
elif type == "cardReps":
sort = "c.reps"
elif type == "cardDue":
sort = "c.type, c.due"
elif type == "cardEase":
sort = "c.factor"
elif type == "cardLapses":
sort = "c.lapses"
elif type == "cardIvl":
sort = "c.ivl"
if not sort:
# deck has invalid sort order; revert to noteCrt
sort = "n.id, c.ord"
return " order by " + sort, self.col.conf['sortBackwards']
# Commands
######################################################################
def _findTag(self, (val, args)):
if val == "none":
return 'n.tags = ""'
val = val.replace("*", "%")
if not val.startswith("%"):
val = "% " + val
if not val.endswith("%"):
val += " %"
args.append(val)
return "n.tags like ?"
def _findCardState(self, (val, args)):
if val in ("review", "new", "learn"):
if val == "review":
n = 2
elif val == "new":
n = 0
else:
return "queue in (1, 3)"
return "type = %d" % n
elif val == "suspended":
return "c.queue = -1"
elif val == "buried":
return "c.queue = -2"
elif val == "due":
return """
(c.queue in (2,3) and c.due <= %d) or
(c.queue = 1 and c.due <= %d)""" % (
self.col.sched.today, self.col.sched.dayCutoff)
def _findRated(self, (val, args)):
# days(:optional_ease)
r = val.split(":")
try:
days = int(r[0])
except ValueError:
return
days = min(days, 31)
# ease
ease = ""
if len(r) > 1:
if r[1] not in ("1", "2", "3", "4"):
return
ease = "and ease=%s" % r[1]
cutoff = (self.col.sched.dayCutoff - 86400*days)*1000
return ("c.id in (select cid from revlog where id>%d %s)" %
(cutoff, ease))
def _findAdded(self, (val, args)):
try:
days = int(val)
except ValueError:
return
cutoff = (self.col.sched.dayCutoff - 86400*days)*1000
return "c.id > %d" % cutoff
def _findProp(self, (val, args)):
# extract
m = re.match("(^.+?)(<=|>=|!=|=|<|>)(.+?$)", val)
if not m:
return
prop, cmp, val = m.groups()
prop = prop.lower()
# is val valid?
try:
if prop == "ease":
val = float(val)
else:
val = int(val)
except ValueError:
return
# is prop valid?
if prop not in ("due", "ivl", "reps", "lapses", "ease"):
return
# query
q = []
if prop == "due":
val += self.col.sched.today
# only valid for review/daily learning
q.append("(c.queue in (2,3))")
elif prop == "ease":
prop = "factor"
val = int(val*1000)
q.append("(%s %s %s)" % (prop, cmp, val))
return " and ".join(q)
def _findText(self, val, args):
val = val.replace("*", "%")
args.append("%"+val+"%")
args.append("%"+val+"%")
return "(n.sfld like ? escape '\\' or n.flds like ? escape '\\')"
def _findNids(self, (val, args)):
if re.search("[^0-9,]", val):
return
return "n.id in (%s)" % val
def _findCids(self, (val, args)):
if re.search("[^0-9,]", val):
return
return "c.id in (%s)" % val
def _findMid(self, (val, args)):
if re.search("[^0-9]", val):
return
return "n.mid = %s" % val
def _findModel(self, (val, args)):
ids = []
val = val.lower()
for m in self.col.models.all():
if m['name'].lower() == val:
ids.append(m['id'])
return "n.mid in %s" % ids2str(ids)
def _findDeck(self, (val, args)):
# if searching for all decks, skip
if val == "*":
return "skip"
# deck types
elif val == "filtered":
return "c.odid"
def dids(did):
if not did:
return None
return [did] + [a[1] for a in self.col.decks.children(did)]
# current deck?
ids = None
if val.lower() == "current":
ids = dids(self.col.decks.current()['id'])
elif "*" not in val:
# single deck
ids = dids(self.col.decks.id(val, create=False))
else:
# wildcard
ids = set()
# should use re.escape in the future
val = val.replace("*", ".*")
val = val.replace("+", "\\+")
for d in self.col.decks.all():
if re.match("(?i)"+val, d['name']):
ids.update(dids(d['id']))
if not ids:
return
sids = ids2str(ids)
return "c.did in %s or c.odid in %s" % (sids, sids)
def _findTemplate(self, (val, args)):
# were we given an ordinal number?
try:
num = int(val) - 1
except:
num = None
if num is not None:
return "c.ord = %d" % num
# search for template names
lims = []
for m in self.col.models.all():
for t in m['tmpls']:
if t['name'].lower() == val.lower():
if m['type'] == MODEL_CLOZE:
# if the user has asked for a cloze card, we want
# to give all ordinals, so we just limit to the
# model instead
lims.append("(n.mid = %s)" % m['id'])
else:
lims.append("(n.mid = %s and c.ord = %s)" % (
m['id'], t['ord']))
return " or ".join(lims)
def _findField(self, field, val):
field = field.lower()
val = val.replace("*", "%")
# find models that have that field
mods = {}
for m in self.col.models.all():
for f in m['flds']:
if f['name'].lower() == field:
mods[str(m['id'])] = (m, f['ord'])
if not mods:
# nothing has that field
return
# gather nids
regex = re.escape(val).replace("\\_", ".").replace("\\%", ".*")
nids = []
for (id,mid,flds) in self.col.db.execute("""
select id, mid, flds from notes
where mid in %s and flds like ? escape '\\'""" % (
ids2str(mods.keys())),
"%"+val+"%"):
flds = splitFields(flds)
ord = mods[str(mid)][1]
strg = flds[ord]
try:
if re.search("(?si)^"+regex+"$", strg):
nids.append(id)
except sre_constants.error:
return
if not nids:
return "0"
return "n.id in %s" % ids2str(nids)
def _findDupes(self, (val, args)):
# caller must call stripHTMLMedia on passed val
try:
mid, val = val.split(",", 1)
except ValueError:  # a failed split/unpack raises ValueError
return
csum = fieldChecksum(val)
nids = []
for nid, flds in self.col.db.execute(
"select id, flds from notes where mid=? and csum=?",
mid, csum):
if stripHTMLMedia(splitFields(flds)[0]) == val:
nids.append(nid)
return "n.id in %s" % ids2str(nids)
# Find and replace
##########################################################################
def findReplace(col, nids, src, dst, regex=False, field=None, fold=True):
"Find and replace fields in a note."
mmap = {}
if field:
for m in col.models.all():
for f in m['flds']:
if f['name'] == field:
mmap[str(m['id'])] = f['ord']
if not mmap:
return 0
# find and gather replacements
if not regex:
src = re.escape(src)
if fold:
src = "(?i)"+src
regex = re.compile(src)
def repl(str):
return re.sub(regex, dst, str)
d = []
snids = ids2str(nids)
nids = []
for nid, mid, flds in col.db.execute(
"select id, mid, flds from notes where id in "+snids):
origFlds = flds
# does it match?
sflds = splitFields(flds)
if field:
try:
ord = mmap[str(mid)]
sflds[ord] = repl(sflds[ord])
except KeyError:
# note doesn't have that field
continue
else:
for c in range(len(sflds)):
sflds[c] = repl(sflds[c])
flds = joinFields(sflds)
if flds != origFlds:
nids.append(nid)
d.append(dict(nid=nid,flds=flds,u=col.usn(),m=intTime()))
if not d:
return 0
# replace
col.db.executemany(
"update notes set flds=:flds,mod=:m,usn=:u where id=:nid", d)
col.updateFieldCache(nids)
col.genCards(nids)
return len(d)
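# Usage sketch (nids as returned by col.findNotes(); names assumed):
#
#   n = findReplace(col, nids, u"colour", u"color")        # literal, case-folded
#   n = findReplace(col, nids, u"\\d+", u"#", regex=True)  # regex form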
def fieldNames(col, downcase=True):
fields = set()
names = []
for m in col.models.all():
for f in m['flds']:
if f['name'].lower() not in fields:
names.append(f['name'])
fields.add(f['name'].lower())
if downcase:
return list(fields)
return names
# Find duplicates
##########################################################################
# returns array of ("dupestr", [nids])
def findDupes(col, fieldName, search=""):
# limit search to notes with applicable field name
if search:
search = "("+search+") "
search += "'%s:*'" % fieldName
# go through notes
vals = {}
dupes = []
fields = {}
def ordForMid(mid):
    if mid not in fields:
        model = col.models.get(mid)
        for c, f in enumerate(model['flds']):
            if f['name'].lower() == fieldName.lower():
                fields[mid] = c
                break
        else:
            # model has no field with that name; skip its notes below
            fields[mid] = None
    return fields[mid]
for nid, mid, flds in col.db.all(
"select id, mid, flds from notes where id in "+ids2str(
col.findNotes(search))):
flds = splitFields(flds)
ord = ordForMid(mid)
if ord is None:
continue
val = flds[ord]
val = stripHTMLMedia(val)
# empty does not count as duplicate
if not val:
continue
if val not in vals:
vals[val] = []
vals[val].append(nid)
if len(vals[val]) == 2:
dupes.append((val, vals[val]))
return dupes
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/find.py
|
find.py
|
import os
import copy
import re
from anki.lang import _
from anki.utils import intTime, json
from anki.db import DB
from anki.collection import _Collection
from anki.consts import *
from anki.stdmodels import addBasicModel, addClozeModel, addForwardReverse, \
addForwardOptionalReverse
def Collection(path, lock=True, server=False, sync=True, log=False):
"Open a new or existing collection. Path must be unicode."
assert path.endswith(".anki2")
path = os.path.abspath(path)
create = not os.path.exists(path)
if create:
base = os.path.basename(path)
for c in ("/", ":", "\\"):
assert c not in base
# connect
db = DB(path)
if create:
ver = _createDB(db)
else:
ver = _upgradeSchema(db)
db.execute("pragma temp_store = memory")
if sync:
db.execute("pragma cache_size = 10000")
db.execute("pragma journal_mode = wal")
else:
db.execute("pragma synchronous = off")
# add db to col and do any remaining upgrades
col = _Collection(db, server, log)
if ver < SCHEMA_VERSION:
_upgrade(col, ver)
elif create:
# add in reverse order so basic is default
addClozeModel(col)
addForwardOptionalReverse(col)
addForwardReverse(col)
addBasicModel(col)
col.save()
if lock:
col.lock()
return col
def _upgradeSchema(db):
ver = db.scalar("select ver from col")
if ver == SCHEMA_VERSION:
return ver
# add odid to cards, edue->odue
######################################################################
if db.scalar("select ver from col") == 1:
db.execute("alter table cards rename to cards2")
_addSchema(db, setColConf=False)
db.execute("""
insert into cards select
id, nid, did, ord, mod, usn, type, queue, due, ivl, factor, reps, lapses,
left, edue, 0, flags, data from cards2""")
db.execute("drop table cards2")
db.execute("update col set ver = 2")
_updateIndices(db)
# remove did from notes
######################################################################
if db.scalar("select ver from col") == 2:
db.execute("alter table notes rename to notes2")
_addSchema(db, setColConf=False)
db.execute("""
insert into notes select
id, guid, mid, mod, usn, tags, flds, sfld, csum, flags, data from notes2""")
db.execute("drop table notes2")
db.execute("update col set ver = 3")
_updateIndices(db)
return ver
def _upgrade(col, ver):
if ver < 3:
# new deck properties
for d in col.decks.all():
d['dyn'] = 0
d['collapsed'] = False
col.decks.save(d)
if ver < 4:
col.modSchema(check=False)
clozes = []
for m in col.models.all():
if not "{{cloze:" in m['tmpls'][0]['qfmt']:
m['type'] = MODEL_STD
col.models.save(m)
else:
clozes.append(m)
for m in clozes:
_upgradeClozeModel(col, m)
col.db.execute("update col set ver = 4")
if ver < 5:
col.db.execute("update cards set odue = 0 where queue = 2")
col.db.execute("update col set ver = 5")
if ver < 6:
col.modSchema(check=False)
import anki.models
for m in col.models.all():
m['css'] = anki.models.defaultModel['css']
for t in m['tmpls']:
if 'css' not in t:
# ankidroid didn't bump version
continue
m['css'] += "\n" + t['css'].replace(
".card ", ".card%d "%(t['ord']+1))
del t['css']
col.models.save(m)
col.db.execute("update col set ver = 6")
if ver < 7:
col.modSchema(check=False)
col.db.execute(
"update cards set odue = 0 where (type = 1 or queue = 2) "
"and not odid")
col.db.execute("update col set ver = 7")
if ver < 8:
col.modSchema(check=False)
col.db.execute(
"update cards set due = due / 1000 where due > 4294967296")
col.db.execute("update col set ver = 8")
if ver < 9:
# adding an empty file to a zip makes python's zip code think it's a
# folder, so remove any empty files
changed = False
dir = col.media.dir()
if dir:
for f in os.listdir(col.media.dir()):
if os.path.isfile(f) and not os.path.getsize(f):
os.unlink(f)
col.media.db.execute(
"delete from log where fname = ?", f)
col.media.db.execute(
"delete from media where fname = ?", f)
changed = True
if changed:
col.media.db.commit()
col.db.execute("update col set ver = 9")
if ver < 10:
col.db.execute("""
update cards set left = left + left*1000 where queue = 1""")
col.db.execute("update col set ver = 10")
if ver < 11:
col.modSchema(check=False)
for d in col.decks.all():
if d['dyn']:
order = d['order']
# failed order was removed
if order >= 5:
order -= 1
d['terms'] = [[d['search'], d['limit'], order]]
del d['search']
del d['limit']
del d['order']
d['resched'] = True
d['return'] = True
else:
if 'extendNew' not in d:
d['extendNew'] = 10
d['extendRev'] = 50
col.decks.save(d)
for c in col.decks.allConf():
r = c['rev']
r['ivlFct'] = r.get("ivlfct", 1)
if 'ivlfct' in r:
del r['ivlfct']
r['maxIvl'] = 36500
col.decks.save(c)
for m in col.models.all():
for t in m['tmpls']:
t['bqfmt'] = ''
t['bafmt'] = ''
col.models.save(m)
col.db.execute("update col set ver = 11")
def _upgradeClozeModel(col, m):
m['type'] = MODEL_CLOZE
# convert first template
t = m['tmpls'][0]
for type in 'qfmt', 'afmt':
t[type] = re.sub("{{cloze:1:(.+?)}}", r"{{cloze:\1}}", t[type])
t['name'] = _("Cloze")
# delete non-cloze cards for the model
rem = []
for t in m['tmpls'][1:]:
if "{{cloze:" not in t['qfmt']:
rem.append(t)
for r in rem:
col.models.remTemplate(m, r)
del m['tmpls'][1:]
col.models._updateTemplOrds(m)
col.models.save(m)
# Creating a new collection
######################################################################
def _createDB(db):
db.execute("pragma page_size = 4096")
db.execute("pragma legacy_file_format = 0")
db.execute("vacuum")
_addSchema(db)
_updateIndices(db)
db.execute("analyze")
return SCHEMA_VERSION
def _addSchema(db, setColConf=True):
db.executescript("""
create table if not exists col (
id integer primary key,
crt integer not null,
mod integer not null,
scm integer not null,
ver integer not null,
dty integer not null,
usn integer not null,
ls integer not null,
conf text not null,
models text not null,
decks text not null,
dconf text not null,
tags text not null
);
create table if not exists notes (
id integer primary key, /* 0 */
guid text not null, /* 1 */
mid integer not null, /* 2 */
mod integer not null, /* 3 */
usn integer not null, /* 4 */
tags text not null, /* 5 */
flds text not null, /* 6 */
sfld integer not null, /* 7 */
csum integer not null, /* 8 */
flags integer not null, /* 9 */
data text not null /* 10 */
);
create table if not exists cards (
id integer primary key, /* 0 */
nid integer not null, /* 1 */
did integer not null, /* 2 */
ord integer not null, /* 3 */
mod integer not null, /* 4 */
usn integer not null, /* 5 */
type integer not null, /* 6 */
queue integer not null, /* 7 */
due integer not null, /* 8 */
ivl integer not null, /* 9 */
factor integer not null, /* 10 */
reps integer not null, /* 11 */
lapses integer not null, /* 12 */
left integer not null, /* 13 */
odue integer not null, /* 14 */
odid integer not null, /* 15 */
flags integer not null, /* 16 */
data text not null /* 17 */
);
create table if not exists revlog (
id integer primary key,
cid integer not null,
usn integer not null,
ease integer not null,
ivl integer not null,
lastIvl integer not null,
factor integer not null,
time integer not null,
type integer not null
);
create table if not exists graves (
usn integer not null,
oid integer not null,
type integer not null
);
insert or ignore into col
values(1,0,0,%(s)s,%(v)s,0,0,0,'','{}','','','{}');
""" % ({'v':SCHEMA_VERSION, 's':intTime(1000)}))
if setColConf:
_addColVars(db, *_getColVars(db))
def _getColVars(db):
import anki.collection
import anki.decks
g = copy.deepcopy(anki.decks.defaultDeck)
g['id'] = 1
g['name'] = _("Default")
g['conf'] = 1
g['mod'] = intTime()
gc = copy.deepcopy(anki.decks.defaultConf)
gc['id'] = 1
return g, gc, anki.collection.defaultConf.copy()
def _addColVars(db, g, gc, c):
db.execute("""
update col set conf = ?, decks = ?, dconf = ?""",
json.dumps(c),
json.dumps({'1': g}),
json.dumps({'1': gc}))
def _updateIndices(db):
"Add indices to the DB."
db.executescript("""
-- syncing
create index if not exists ix_notes_usn on notes (usn);
create index if not exists ix_cards_usn on cards (usn);
create index if not exists ix_revlog_usn on revlog (usn);
-- card spacing, etc
create index if not exists ix_cards_nid on cards (nid);
-- scheduling and deck limiting
create index if not exists ix_cards_sched on cards (did, queue, due);
-- revlog by card
create index if not exists ix_revlog_cid on revlog (cid);
-- field uniqueness
create index if not exists ix_notes_csum on notes (csum);
""")
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/storage.py
|
storage.py
|
jquery = '''
bb==="object"){bb=a.param(bb,a.ajaxSettings.traditional);a9="POST"}}}var a7=this;a.ajax({url:a8,type:a9,dataType:"html",data:bb,complete:function(bf,bd,be){be=bf.responseText;if(bf.isResolved()){bf.done(function(bg){be=bg});a7.html(a6?a("<div>").append(be.replace(aN,"")).find(a6):be)}if(bc){a7.each(bc,[be,bd,bf])}}});return this},serialize:function(){return a.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?a.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||q.test(this.nodeName)||aG.test(this.type))}).map(function(a6,a7){var a8=a(this).val();return a8==null?null:a.isArray(a8)?a.map(a8,function(ba,a9){return{name:a7.name,value:ba.replace(a5,"\\r\\n")}}):{name:a7.name,value:a8.replace(a5,"\\r\\n")}}).get()}});a.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a6,a7){a.fn[a7]=function(a8){return this.bind(a7,a8)}});a.each(["get","post"],function(a6,a7){a[a7]=function(a8,ba,bb,a9){if(a.isFunction(ba)){a9=a9||bb;bb=ba;ba=null}return a.ajax({type:a7,url:a8,data:ba,success:bb,dataType:a9})}});a.extend({getScript:function(a6,a7){return a.get(a6,null,a7,"script")},getJSON:function(a6,a7,a8){return a.get(a6,a7,a8,"json")},ajaxSetup:function(a6){a.extend(true,a.ajaxSettings,a6);if(a6.context){a.ajaxSettings.context=a6.context}},ajaxSettings:{url:location.href,global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":"*/*"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":aR.String,"text html":true,"text json":a.parseJSON,"text xml":a.parseXML}},ajaxPrefilter:d(R),ajaxTransport:d(r),ajax:function(ba,a7){if(typeof a7!=="object"){a7=ba;ba=G}a7=a7||{};var be=a.extend(true,{},a.ajaxSettings,a7),bs=(be.context=("context" in a7?a7:a.ajaxSettings).context)||be,bi=bs===be?a.event:a(bs),br=a.Deferred(),bo=a._Deferred(),bc=be.statusCode||{},bj={},bq,a8,bm,bg,bd=ag.location,bf=bd.protocol||"http:",bk,bb=0,bl,a9={readyState:0,setRequestHeader:function(bt,bu){if(bb===0){bj[bt.toLowerCase()]=bu}return this},getAllResponseHeaders:function(){return bb===2?bq:null},getResponseHeader:function(bu){var bt;if(bb===2){if(!a8){a8={};while((bt=al.exec(bq))){a8[bt[1].toLowerCase()]=bt[2]}}bt=a8[bu.toLowerCase()]}return bt||null},abort:function(bt){bt=bt||"abort";if(bm){bm.abort(bt)}bh(0,bt);return this}};function bh(by,bw,bz,bv){if(bb===2){return}bb=2;if(bg){clearTimeout(bg)}bm=G;bq=bv||"";a9.readyState=by?4:0;var bt,bD,bC,bx=bz?aW(be,a9,bz):G,bu,bB;if(by>=200&&by<300||by===304){if(be.ifModified){if((bu=a9.getResponseHeader("Last-Modified"))){a.lastModified[be.url]=bu}if((bB=a9.getResponseHeader("Etag"))){a.etag[be.url]=bB}}if(by===304){bw="notmodified";bt=true}else{try{bD=C(be,bx);bw="success";bt=true}catch(bA){bw="parsererror";bC=bA}}}else{bC=bw;if(by){bw="error";if(by<0){by=0}}}a9.status=by;a9.statusText=bw;if(bt){br.resolveWith(bs,[bD,bw,a9])}else{br.rejectWith(bs,[a9,bw,bC])}a9.statusCode(bc);bc=G;if(be.global){bi.trigger("ajax"+(bt?"Success":"Error"),[a9,be,bt?bD:bC])}bo.resolveWith(bs,[a9,bw]);if(be.global){bi.trigger("ajaxComplete",[a9,be]);if(!(--a.active)){a.event.trigger("ajaxStop")}}}br.promise(a9);a9.success=a9.done;a9.error=a9.fail;a9.complete=bo.done;a9.statusCode=function(bu){if(bu){var bt;if(bb<2){for(bt in 
bu){bc[bt]=[bc[bt],bu[bt]]}}else{bt=bu[a9.status];a9.then(bt,bt)}}return this};be.url=(""+(ba||be.url)).replace(a2,"").replace(b,bf+"//");be.dataTypes=a.trim(be.dataType||"*").toLowerCase().split(f);if(!be.crossDomain){bk=F.exec(be.url.toLowerCase());be.crossDomain=!!(bk&&(bk[1]!=bf||bk[2]!=bd.hostname||(bk[3]||(bk[1]==="http:"?80:443))!=(bd.port||(bf==="http:"?80:443))))}if(be.data&&be.processData&&typeof be.data!=="string"){be.data=a.param(be.data,be.traditional)}az(R,be,a7,a9);be.type=be.type.toUpperCase();be.hasContent=!au.test(be.type);if(be.global&&a.active++===0){a.event.trigger("ajaxStart")}if(!be.hasContent){if(be.data){be.url+=(H.test(be.url)?"&":"?")+be.data}if(be.cache===false){var a6=a.now(),bp=be.url.replace(a4,"$1_="+a6);be.url=bp+((bp===be.url)?(H.test(be.url)?"&":"?")+"_="+a6:"")}}if(be.data&&be.hasContent&&be.contentType!==false||a7.contentType){bj["content-type"]=be.contentType}if(be.ifModified){if(a.lastModified[be.url]){bj["if-modified-since"]=a.lastModified[be.url]}if(a.etag[be.url]){bj["if-none-match"]=a.etag[be.url]}}bj.accept=be.dataTypes[0]&&be.accepts[be.dataTypes[0]]?be.accepts[be.dataTypes[0]]+(be.dataTypes[0]!=="*"?", */*; q=0.01":""):be.accepts["*"];for(bl in be.headers){bj[bl.toLowerCase()]=be.headers[bl]}if(be.beforeSend&&(be.beforeSend.call(bs,a9,be)===false||bb===2)){bh(0,"abort");a9=false}else{for(bl in {success:1,error:1,complete:1}){a9[bl](be[bl])}bm=az(r,be,a7,a9);if(!bm){bh(-1,"No Transport")}else{bb=a9.readyState=1;if(be.global){bi.trigger("ajaxSend",[a9,be])}if(be.async&&be.timeout>0){bg=setTimeout(function(){a9.abort("timeout")},be.timeout)}try{bm.send(bj,bh)}catch(bn){if(status<2){bh(-1,bn)}else{a.error(bn)}}}}return a9},param:function(a6,a8){var a7=[],ba=function(bb,bc){bc=a.isFunction(bc)?bc():bc;a7[a7.length]=encodeURIComponent(bb)+"="+encodeURIComponent(bc)};if(a8===G){a8=a.ajaxSettings.traditional}if(a.isArray(a6)||a6.jquery){a.each(a6,function(){ba(this.name,this.value)})}else{for(var a9 in a6){u(a9,a6[a9],a8,ba)}}return a7.join("&").replace(h,"+")}});function u(a7,a9,a6,a8){if(a.isArray(a9)&&a9.length){a.each(a9,function(bb,ba){if(a6||ac.test(a7)){a8(a7,ba)}else{u(a7+"["+(typeof ba==="object"||a.isArray(ba)?bb:"")+"]",ba,a6,a8)}})}else{if(!a6&&a9!=null&&typeof a9==="object"){if(a.isArray(a9)||a.isEmptyObject(a9)){a8(a7,"")}else{a.each(a9,function(bb,ba){u(a7+"["+bb+"]",ba,a6,a8)})}}else{a8(a7,a9)}}}a.extend({active:0,lastModified:{},etag:{}});function aW(bf,be,bb){var a7=bf.contents,bd=bf.dataTypes,a8=bf.responseFields,ba,bc,a9,a6;for(bc in a8){if(bc in bb){be[a8[bc]]=bb[bc]}}while(bd[0]==="*"){bd.shift();if(ba===G){ba=be.getResponseHeader("content-type")}}if(ba){for(bc in a7){if(a7[bc]&&a7[bc].test(ba)){bd.unshift(bc);break}}}if(bd[0] in bb){a9=bd[0]}else{for(bc in bb){if(!bd[0]||bf.converters[bc+" "+bd[0]]){a9=bc;break}if(!a6){a6=bc}}a9=a9||a6}if(a9){if(a9!==bd[0]){bd.unshift(a9)}return bb[a9]}}function C(bi,bb){if(bi.dataFilter){bb=bi.dataFilter(bb,bi.dataType)}var bf=bi.dataTypes,bh=bi.converters,bc,a8=bf.length,bd,be=bf[0],a9,ba,bg,a7,a6;for(bc=1;bc<a8;bc++){a9=be;be=bf[bc];if(be==="*"){be=a9}else{if(a9!=="*"&&a9!==be){ba=a9+" "+be;bg=bh[ba]||bh["* "+be];if(!bg){a6=G;for(a7 in bh){bd=a7.split(" ");if(bd[0]===a9||bd[0]==="*"){a6=bh[bd[1]+" "+be];if(a6){a7=bh[a7];if(a7===true){bg=a6}else{if(a6===true){bg=a7}}break}}}}if(!(bg||a6)){a.error("No conversion from "+ba.replace(" "," to "))}if(bg!==true){bb=bg?bg(bb):a6(a7(bb))}}}}return bb}var 
ak=a.now(),t=/(\\=)\\?(&|$)|()\\?\\?()/i;a.ajaxSetup({jsonp:"callback",jsonpCallback:function(){return a.expando+"_"+(ak++)}});a.ajaxPrefilter("json jsonp",function(be,bb,bd){bd=(typeof be.data==="string");if(be.dataTypes[0]==="jsonp"||bb.jsonpCallback||bb.jsonp!=null||be.jsonp!==false&&(t.test(be.url)||bd&&t.test(be.data))){var bc,a8=be.jsonpCallback=a.isFunction(be.jsonpCallback)?be.jsonpCallback():be.jsonpCallback,ba=aR[a8],a6=be.url,a9=be.data,a7="$1"+a8+"$2";if(be.jsonp!==false){a6=a6.replace(t,a7);if(be.url===a6){if(bd){a9=a9.replace(t,a7)}if(be.data===a9){a6+=(/\\?/.test(a6)?"&":"?")+be.jsonp+"="+a8}}}be.url=a6;be.data=a9;aR[a8]=function(bf){bc=[bf]};be.complete=[function(){aR[a8]=ba;if(ba){if(bc&&a.isFunction(ba)){aR[a8](bc[0])}}else{try{delete aR[a8]}catch(bf){}}},be.complete];be.converters["script json"]=function(){if(!bc){a.error(a8+" was not called")}return bc[0]};be.dataTypes[0]="json";return"script"}});a.ajaxSetup({accepts:{script:"text/javascript, application/javascript"},contents:{script:/javascript/},converters:{"text script":function(a6){a.globalEval(a6);return a6}}});a.ajaxPrefilter("script",function(a6){if(a6.cache===G){a6.cache=false}if(a6.crossDomain){a6.type="GET";a6.global=false}});a.ajaxTransport("script",function(a8){if(a8.crossDomain){var a6,a7=ag.getElementsByTagName("head")[0]||ag.documentElement;return{send:function(a9,ba){a6=ag.createElement("script");a6.async="async";if(a8.scriptCharset){a6.charset=a8.scriptCharset}a6.src=a8.url;a6.onload=a6.onreadystatechange=function(bc,bb){if(!a6.readyState||/loaded|complete/.test(a6.readyState)){a6.onload=a6.onreadystatechange=null;if(a7&&a6.parentNode){a7.removeChild(a6)}a6=G;if(!bb){ba(200,"success")}}};a7.insertBefore(a6,a7.firstChild)},abort:function(){if(a6){a6.onload(0,1)}}}}});var x=a.now(),aH={},aE,am;a.ajaxSettings.xhr=aR.ActiveXObject?function(){if(aR.location.protocol!=="file:"){try{return new aR.XMLHttpRequest()}catch(a7){}}try{return new aR.ActiveXObject("Microsoft.XMLHTTP")}catch(a6){}}:function(){return new aR.XMLHttpRequest()};try{am=a.ajaxSettings.xhr()}catch(a3){}a.support.ajax=!!am;a.support.cors=am&&("withCredentials" in am);am=G;if(a.support.ajax){a.ajaxTransport(function(a6){if(!a6.crossDomain||a.support.cors){var a7;return{send:function(bc,a8){if(!aE){aE=1;a(aR).bind("unload",function(){a.each(aH,function(bd,be){if(be.onreadystatechange){be.onreadystatechange(1)}})})}var bb=a6.xhr(),ba;if(a6.username){bb.open(a6.type,a6.url,a6.async,a6.username,a6.password)}else{bb.open(a6.type,a6.url,a6.async)}if(!(a6.crossDomain&&!a6.hasContent)&&!bc["x-requested-with"]){bc["x-requested-with"]="XMLHttpRequest"}try{a.each(bc,function(bd,be){bb.setRequestHeader(bd,be)})}catch(a9){}bb.send((a6.hasContent&&a6.data)||null);a7=function(bg,be){if(a7&&(be||bb.readyState===4)){a7=0;if(ba){bb.onreadystatechange=a.noop;delete aH[ba]}if(be){if(bb.readyState!==4){bb.abort()}}else{var bd=bb.status,bk,bh=bb.getAllResponseHeaders(),bi={},bf=bb.responseXML;if(bf&&bf.documentElement){bi.xml=bf}bi.text=bb.responseText;try{bk=bb.statusText}catch(bj){bk=""}bd=bd===0?(!a6.crossDomain||bk?(bh?304:0):302):(bd==1223?204:bd);a8(bd,bk,bi,bh)}}};if(!a6.async||bb.readyState===4){a7()}else{ba=x++;aH[ba]=bb;bb.onreadystatechange=a7}},abort:function(){if(a7){a7(0,1)}}}}})}var L={},aj=/^(?:toggle|show|hide)$/,aw=/^([+\\-]=)?([\\d+.\\-]+)([a-z%]*)$/i,aL,ap=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];a.fn.extend({show:function(a9,bc,bb){var 
a8,ba;if(a9||a9===0){return this.animate(aJ("show",3),a9,bc,bb)}else{for(var a7=0,a6=this.length;a7<a6;a7++){a8=this[a7];ba=a8.style.display;if(!a._data(a8,"olddisplay")&&ba==="none"){ba=a8.style.display=""}if(ba===""&&a.css(a8,"display")==="none"){a._data(a8,"olddisplay",w(a8.nodeName))}}for(a7=0;a7<a6;a7++){a8=this[a7];ba=a8.style.display;if(ba===""||ba==="none"){a8.style.display=a._data(a8,"olddisplay")||""}}return this}},hide:function(a8,bb,ba){if(a8||a8===0){return this.animate(aJ("hide",3),a8,bb,ba)}else{for(var a7=0,a6=this.length;a7<a6;a7++){var a9=a.css(this[a7],"display");if(a9!=="none"&&!a._data(this[a7],"olddisplay")){a._data(this[a7],"olddisplay",a9)}}for(a7=0;a7<a6;a7++){this[a7].style.display="none"}return this}},_toggle:a.fn.toggle,toggle:function(a8,a7,a9){var a6=typeof a8==="boolean";if(a.isFunction(a8)&&a.isFunction(a7)){this._toggle.apply(this,arguments)}else{if(a8==null||a6){this.each(function(){var ba=a6?a8:a(this).is(":hidden");a(this)[ba?"show":"hide"]()})}else{this.animate(aJ("toggle",3),a8,a7,a9)}}return this},fadeTo:function(a6,a9,a8,a7){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:a9},a6,a8,a7)},animate:function(ba,a7,a9,a8){var a6=a.speed(a7,a9,a8);if(a.isEmptyObject(ba)){return this.each(a6.complete)}return this[a6.queue===false?"each":"queue"](function(){var bd=a.extend({},a6),bh,be=this.nodeType===1,bf=be&&a(this).is(":hidden"),bb=this;for(bh in ba){var bc=a.camelCase(bh);if(bh!==bc){ba[bc]=ba[bh];delete ba[bh];bh=bc}if(ba[bh]==="hide"&&bf||ba[bh]==="show"&&!bf){return bd.complete.call(this)}if(be&&(bh==="height"||bh==="width")){bd.overflow=[this.style.overflow,this.style.overflowX,this.style.overflowY];if(a.css(this,"display")==="inline"&&a.css(this,"float")==="none"){if(!a.support.inlineBlockNeedsLayout){this.style.display="inline-block"}else{var bg=w(this.nodeName);if(bg==="inline"){this.style.display="inline-block"}else{this.style.display="inline";this.style.zoom=1}}}}if(a.isArray(ba[bh])){(bd.specialEasing=bd.specialEasing||{})[bh]=ba[bh][1];ba[bh]=ba[bh][0]}}if(bd.overflow!=null){this.style.overflow="hidden"}bd.curAnim=a.extend({},ba);a.each(ba,function(bj,bn){var bm=new a.fx(bb,bd,bj);if(aj.test(bn)){bm[bn==="toggle"?bf?"show":"hide":bn](ba)}else{var bl=aw.exec(bn),bo=bm.cur()||0;if(bl){var bi=parseFloat(bl[2]),bk=bl[3]||"px";if(bk!=="px"){a.style(bb,bj,(bi||1)+bk);bo=((bi||1)/bm.cur())*bo;a.style(bb,bj,bo+bk)}if(bl[1]){bi=((bl[1]==="-="?-1:1)*bi)+bo}bm.custom(bo,bi,bk)}else{bm.custom(bo,bn,"")}}});return true})},stop:function(a7,a6){var a8=a.timers;if(a7){this.queue([])}this.each(function(){for(var a9=a8.length-1;a9>=0;a9--){if(a8[a9].elem===this){if(a6){a8[a9](true)}a8.splice(a9,1)}}});if(!a6){this.dequeue()}return this}});function aJ(a7,a6){var a8={};a.each(ap.concat.apply([],ap.slice(0,a6)),function(){a8[this]=a7});return a8}a.each({slideDown:aJ("show",1),slideUp:aJ("hide",1),slideToggle:aJ("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a6,a7){a.fn[a6]=function(a8,ba,a9){return this.animate(a7,a8,ba,a9)}});a.extend({speed:function(a8,a9,a7){var a6=a8&&typeof a8==="object"?a.extend({},a8):{complete:a7||!a7&&a9||a.isFunction(a8)&&a8,duration:a8,easing:a7&&a9||a9&&!a.isFunction(a9)&&a9};a6.duration=a.fx.off?0:typeof a6.duration==="number"?a6.duration:a6.duration in a.fx.speeds?a.fx.speeds[a6.duration]:a.fx.speeds._default;a6.old=a6.complete;a6.complete=function(){if(a6.queue!==false){a(this).dequeue()}if(a.isFunction(a6.old)){a6.old.call(this)}};return 
a6},easing:{linear:function(a8,a9,a6,a7){return a6+a7*a8},swing:function(a8,a9,a6,a7){return((-Math.cos(a8*Math.PI)/2)+0.5)*a7+a6}},timers:[],fx:function(a7,a6,a8){this.options=a6;this.elem=a7;this.prop=a8;if(!a6.orig){a6.orig={}}}});a.fx.prototype={update:function(){if(this.options.step){this.options.step.call(this.elem,this.now,this)}(a.fx.step[this.prop]||a.fx.step._default)(this)},cur:function(){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null)){return this.elem[this.prop]}var a6=parseFloat(a.css(this.elem,this.prop));return a6||0},custom:function(bb,ba,a9){var a6=this,a8=a.fx;this.startTime=a.now();this.start=bb;this.end=ba;this.unit=a9||this.unit||"px";this.now=this.start;this.pos=this.state=0;function a7(bc){return a6.step(bc)}a7.elem=this.elem;if(a7()&&a.timers.push(a7)&&!aL){aL=setInterval(a8.tick,a8.interval)}},show:function(){this.options.orig[this.prop]=a.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());a(this.elem).show()},hide:function(){this.options.orig[this.prop]=a.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a9){var be=a.now(),ba=true;if(a9||be>=this.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var bb in this.options.curAnim){if(this.options.curAnim[bb]!==true){ba=false}}if(ba){if(this.options.overflow!=null&&!a.support.shrinkWrapBlocks){var a8=this.elem,bf=this.options;a.each(["","X","Y"],function(bg,bh){a8.style["overflow"+bh]=bf.overflow[bg]})}if(this.options.hide){a(this.elem).hide()}if(this.options.hide||this.options.show){for(var a6 in this.options.curAnim){a.style(this.elem,a6,this.options.orig[a6])}}this.options.complete.call(this.elem)}return false}else{var a7=be-this.startTime;this.state=a7/this.options.duration;var bc=this.options.specialEasing&&this.options.specialEasing[this.prop];var bd=this.options.easing||(a.easing.swing?"swing":"linear");this.pos=a.easing[bc||bd](this.state,a7,0,1,this.options.duration);this.now=this.start+((this.end-this.start)*this.pos);this.update()}return true}};a.extend(a.fx,{tick:function(){var a7=a.timers;for(var a6=0;a6<a7.length;a6++){if(!a7[a6]()){a7.splice(a6--,1)}}if(!a7.length){a.fx.stop()}},interval:13,stop:function(){clearInterval(aL);aL=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a6){a.style(a6.elem,"opacity",a6.now)},_default:function(a6){if(a6.elem.style&&a6.elem.style[a6.prop]!=null){a6.elem.style[a6.prop]=(a6.prop==="width"||a6.prop==="height"?Math.max(0,a6.now):a6.now)+a6.unit}else{a6.elem[a6.prop]=a6.now}}}});if(a.expr&&a.expr.filters){a.expr.filters.animated=function(a6){return a.grep(a.timers,function(a7){return a6===a7.elem}).length}}function w(a8){if(!L[a8]){var a6=a("<"+a8+">").appendTo("body"),a7=a6.css("display");a6.remove();if(a7==="none"||a7===""){a7="block"}L[a8]=a7}return L[a8]}var O=/^t(?:able|d|h)$/i,U=/^(?:body|html)$/i;if("getBoundingClientRect" in ag.documentElement){a.fn.offset=function(bj){var a9=this[0],bc;if(bj){return this.each(function(bk){a.offset.setOffset(this,bj,bk)})}if(!a9||!a9.ownerDocument){return null}if(a9===a9.ownerDocument.body){return a.offset.bodyOffset(a9)}try{bc=a9.getBoundingClientRect()}catch(bg){}var bi=a9.ownerDocument,a7=bi.documentElement;if(!bc||!a.contains(a7,a9)){return bc?{top:bc.top,left:bc.left}:{top:0,left:0}}var 
bd=bi.body,be=ar(bi),bb=a7.clientTop||bd.clientTop||0,bf=a7.clientLeft||bd.clientLeft||0,a6=(be.pageYOffset||a.support.boxModel&&a7.scrollTop||bd.scrollTop),ba=(be.pageXOffset||a.support.boxModel&&a7.scrollLeft||bd.scrollLeft),bh=bc.top+a6-bb,a8=bc.left+ba-bf;return{top:bh,left:a8}}}else{a.fn.offset=function(bh){var bb=this[0];if(bh){return this.each(function(bi){a.offset.setOffset(this,bh,bi)})}if(!bb||!bb.ownerDocument){return null}if(bb===bb.ownerDocument.body){return a.offset.bodyOffset(bb)}a.offset.initialize();var be,a8=bb.offsetParent,a7=bb,bg=bb.ownerDocument,a9=bg.documentElement,bc=bg.body,bd=bg.defaultView,a6=bd?bd.getComputedStyle(bb,null):bb.currentStyle,bf=bb.offsetTop,ba=bb.offsetLeft;while((bb=bb.parentNode)&&bb!==bc&&bb!==a9){if(a.offset.supportsFixedPosition&&a6.position==="fixed"){break}be=bd?bd.getComputedStyle(bb,null):bb.currentStyle;bf-=bb.scrollTop;ba-=bb.scrollLeft;if(bb===a8){bf+=bb.offsetTop;ba+=bb.offsetLeft;if(a.offset.doesNotAddBorder&&!(a.offset.doesAddBorderForTableAndCells&&O.test(bb.nodeName))){bf+=parseFloat(be.borderTopWidth)||0;ba+=parseFloat(be.borderLeftWidth)||0}a7=a8;a8=bb.offsetParent}if(a.offset.subtractsBorderForOverflowNotVisible&&be.overflow!=="visible"){bf+=parseFloat(be.borderTopWidth)||0;ba+=parseFloat(be.borderLeftWidth)||0}a6=be}if(a6.position==="relative"||a6.position==="static"){bf+=bc.offsetTop;ba+=bc.offsetLeft}if(a.offset.supportsFixedPosition&&a6.position==="fixed"){bf+=Math.max(a9.scrollTop,bc.scrollTop);ba+=Math.max(a9.scrollLeft,bc.scrollLeft)}return{top:bf,left:ba}}}a.offset={initialize:function(){var a6=ag.body,a7=ag.createElement("div"),ba,bc,bb,bd,a8=parseFloat(a.css(a6,"marginTop"))||0,a9="<div style=\'position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;\'><div></div></div><table style=\'position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;\' cellpadding=\'0\' cellspacing=\'0\'><tr><td></td></tr></table>";a.extend(a7.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});a7.innerHTML=a9;a6.insertBefore(a7,a6.firstChild);ba=a7.firstChild;bc=ba.firstChild;bd=ba.nextSibling.firstChild.firstChild;this.doesNotAddBorder=(bc.offsetTop!==5);this.doesAddBorderForTableAndCells=(bd.offsetTop===5);bc.style.position="fixed";bc.style.top="20px";this.supportsFixedPosition=(bc.offsetTop===20||bc.offsetTop===15);bc.style.position=bc.style.top="";ba.style.overflow="hidden";ba.style.position="relative";this.subtractsBorderForOverflowNotVisible=(bc.offsetTop===-5);this.doesNotIncludeMarginInBodyOffset=(a6.offsetTop!==a8);a6.removeChild(a7);a6=a7=ba=bc=bb=bd=null;a.offset.initialize=a.noop},bodyOffset:function(a6){var a8=a6.offsetTop,a7=a6.offsetLeft;a.offset.initialize();if(a.offset.doesNotIncludeMarginInBodyOffset){a8+=parseFloat(a.css(a6,"marginTop"))||0;a7+=parseFloat(a.css(a6,"marginLeft"))||0}return{top:a8,left:a7}},setOffset:function(a9,bi,bc){var bd=a.css(a9,"position");if(bd==="static"){a9.style.position="relative"}var bb=a(a9),a7=bb.offset(),a6=a.css(a9,"top"),bg=a.css(a9,"left"),bh=(bd==="absolute"&&a.inArray("auto",[a6,bg])>-1),bf={},be={},a8,ba;if(bh){be=bb.position()}a8=bh?be.top:parseInt(a6,10)||0;ba=bh?be.left:parseInt(bg,10)||0;if(a.isFunction(bi)){bi=bi.call(a9,bc,a7)}if(bi.top!=null){bf.top=(bi.top-a7.top)+a8}if(bi.left!=null){bf.left=(bi.left-a7.left)+ba}if("using" in bi){bi.using.call(a9,bf)}else{bb.css(bf)}}};a.fn.extend({position:function(){if(!this[0]){return null}var 
a8=this[0],a7=this.offsetParent(),a9=this.offset(),a6=U.test(a7[0].nodeName)?{top:0,left:0}:a7.offset();a9.top-=parseFloat(a.css(a8,"marginTop"))||0;a9.left-=parseFloat(a.css(a8,"marginLeft"))||0;a6.top+=parseFloat(a.css(a7[0],"borderTopWidth"))||0;a6.left+=parseFloat(a.css(a7[0],"borderLeftWidth"))||0;return{top:a9.top-a6.top,left:a9.left-a6.left}},offsetParent:function(){return this.map(function(){var a6=this.offsetParent||ag.body;while(a6&&(!U.test(a6.nodeName)&&a.css(a6,"position")==="static")){a6=a6.offsetParent}return a6})}});a.each(["Left","Top"],function(a7,a6){var a8="scroll"+a6;a.fn[a8]=function(bb){var a9=this[0],ba;if(!a9){return null}if(bb!==G){return this.each(function(){ba=ar(this);if(ba){ba.scrollTo(!a7?bb:a(ba).scrollLeft(),a7?bb:a(ba).scrollTop())}else{this[a8]=bb}})}else{ba=ar(a9);return ba?("pageXOffset" in ba)?ba[a7?"pageYOffset":"pageXOffset"]:a.support.boxModel&&ba.document.documentElement[a8]||ba.document.body[a8]:a9[a8]}}});function ar(a6){return a.isWindow(a6)?a6:a6.nodeType===9?a6.defaultView||a6.parentWindow:false}a.each(["Height","Width"],function(a7,a6){var a8=a6.toLowerCase();a.fn["inner"+a6]=function(){return this[0]?parseFloat(a.css(this[0],a8,"padding")):null};a.fn["outer"+a6]=function(a9){return this[0]?parseFloat(a.css(this[0],a8,a9?"margin":"border")):null};a.fn[a8]=function(ba){var bb=this[0];if(!bb){return ba==null?null:this}if(a.isFunction(ba)){return this.each(function(bf){var be=a(this);be[a8](ba.call(this,bf,be[a8]()))})}if(a.isWindow(bb)){var bc=bb.document.documentElement["client"+a6];return bb.document.compatMode==="CSS1Compat"&&bc||bb.document.body["client"+a6]||bc}else{if(bb.nodeType===9){return Math.max(bb.documentElement["client"+a6],bb.body["scroll"+a6],bb.documentElement["scroll"+a6],bb.body["offset"+a6],bb.documentElement["offset"+a6])}else{if(ba===G){var bd=a.css(bb,a8),a9=parseFloat(bd);return a.isNaN(a9)?bd:a9}else{return this.css(a8,typeof ba==="string"?ba:ba+"px")}}}}})})(window);\n'''
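################################################################################
# The string above holds vendored, minified jQuery; `plot` below holds vendored
# Flot v0.7 (the MIT-licensed jQuery plotting library by IOLA). A minimal,
# illustrative sketch of how such embedded sources are typically inlined into a
# generated HTML report so the report renders without network access (the
# helper and its parameters are hypothetical, not part of this module's API):
def _embed_script(handle, script_src):
    # Wrap one embedded JS source string in a <script> tag and write it into an
    # already-open HTML report file handle, e.g. _embed_script(fh, plot).
    handle.write(f"<script>{script_src}</script>\n")
################################################################################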
plot = '''/* Javascript plotting library for jQuery, v. 0.7.\n *\n * Released under the MIT license by IOLA, December 2007.\n *\n */\n(function(b){b.color={};b.color.make=function(d,e,g,f){var c={};c.r=d||0;c.g=e||0;c.b=g||0;c.a=f!=null?f:1;c.add=function(h,j){for(var k=0;k<h.length;++k){c[h.charAt(k)]+=j}return c.normalize()};c.scale=function(h,j){for(var k=0;k<h.length;++k){c[h.charAt(k)]*=j}return c.normalize()};c.toString=function(){if(c.a>=1){return"rgb("+[c.r,c.g,c.b].join(",")+")"}else{return"rgba("+[c.r,c.g,c.b,c.a].join(",")+")"}};c.normalize=function(){function h(k,j,l){return j<k?k:(j>l?l:j)}c.r=h(0,parseInt(c.r),255);c.g=h(0,parseInt(c.g),255);c.b=h(0,parseInt(c.b),255);c.a=h(0,c.a,1);return c};c.clone=function(){return b.color.make(c.r,c.b,c.g,c.a)};return c.normalize()};b.color.extract=function(d,e){var c;do{c=d.css(e).toLowerCase();if(c!=""&&c!="transparent"){break}d=d.parent()}while(!b.nodeName(d.get(0),"body"));if(c=="rgba(0, 0, 0, 0)"){c="transparent"}return b.color.parse(c)};b.color.parse=function(c){var d,f=b.color.make;if(d=/rgb\\(\\s*([0-9]{1,3})\\s*,\\s*([0-9]{1,3})\\s*,\\s*([0-9]{1,3})\\s*\\)/.exec(c)){return f(parseInt(d[1],10),parseInt(d[2],10),parseInt(d[3],10))}if(d=/rgba\\(\\s*([0-9]{1,3})\\s*,\\s*([0-9]{1,3})\\s*,\\s*([0-9]{1,3})\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\s*\\)/.exec(c)){return f(parseInt(d[1],10),parseInt(d[2],10),parseInt(d[3],10),parseFloat(d[4]))}if(d=/rgb\\(\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*\\)/.exec(c)){return f(parseFloat(d[1])*2.55,parseFloat(d[2])*2.55,parseFloat(d[3])*2.55)}if(d=/rgba\\(\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\%\\s*,\\s*([0-9]+(?:\\.[0-9]+)?)\\s*\\)/.exec(c)){return f(parseFloat(d[1])*2.55,parseFloat(d[2])*2.55,parseFloat(d[3])*2.55,parseFloat(d[4]))}if(d=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(c)){return f(parseInt(d[1],16),parseInt(d[2],16),parseInt(d[3],16))}if(d=/#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(c)){return f(parseInt(d[1]+d[1],16),parseInt(d[2]+d[2],16),parseInt(d[3]+d[3],16))}var e=b.trim(c).toLowerCase();if(e=="transparent"){return f(255,255,255,0)}else{d=a[e]||[0,0,0];return f(d[0],d[1],d[2])}};var a={aqua:[0,255,255],azure:[240,255,255],beige:[245,245,220],black:[0,0,0],blue:[0,0,255],brown:[165,42,42],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgrey:[169,169,169],darkgreen:[0,100,0],darkkhaki:[189,183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkviolet:[148,0,211],fuchsia:[255,0,255],gold:[255,215,0],green:[0,128,0],indigo:[75,0,130],khaki:[240,230,140],lightblue:[173,216,230],lightcyan:[224,255,255],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightyellow:[255,255,224],lime:[0,255,0],magenta:[255,0,255],maroon:[128,0,0],navy:[0,0,128],olive:[128,128,0],orange:[255,165,0],pink:[255,192,203],purple:[128,0,128],violet:[128,0,128],red:[255,0,0],silver:[192,192,192],white:[255,255,255],yellow:[255,255,0]}})(jQuery);(function(c){function b(aw,aj,K,ag){var 
R=[],P={colors:["#edc240","#afd8f8","#cb4b4b","#4da74d","#9440ed"],legend:{show:true,noColumns:1,labelFormatter:null,labelBoxBorderColor:"#ccc",container:null,position:"ne",margin:5,backgroundColor:null,backgroundOpacity:0.85},xaxis:{show:null,position:"bottom",mode:null,color:null,tickColor:null,transform:null,inverseTransform:null,min:null,max:null,autoscaleMargin:null,ticks:null,tickFormatter:null,labelWidth:null,labelHeight:null,reserveSpace:null,tickLength:null,alignTicksWithAxis:null,tickDecimals:null,tickSize:null,minTickSize:null,monthNames:null,timeformat:null,twelveHourClock:false},yaxis:{autoscaleMargin:0.02,position:"left"},xaxes:[],yaxes:[],series:{points:{show:false,radius:3,lineWidth:2,fill:true,fillColor:"#ffffff",symbol:"circle"},lines:{lineWidth:2,fill:false,fillColor:null,steps:false},bars:{show:false,lineWidth:2,barWidth:1,fill:true,fillColor:null,align:"left",horizontal:false},shadowSize:3},grid:{show:true,aboveData:false,color:"#545454",backgroundColor:null,borderColor:null,tickColor:null,labelMargin:5,axisMargin:8,borderWidth:2,minBorderMargin:null,markings:null,markingsColor:"#f4f4f4",markingsLineWidth:2,clickable:false,hoverable:false,autoHighlight:true,mouseActiveRadius:10},hooks:{}},aA=null,ae=null,z=null,I=null,B=null,q=[],ax=[],r={left:0,right:0,top:0,bottom:0},H=0,J=0,h=0,x=0,al={processOptions:[],processRawData:[],processDatapoints:[],drawSeries:[],draw:[],bindEvents:[],drawOverlay:[],shutdown:[]},ar=this;ar.setData=ak;ar.setupGrid=u;ar.draw=X;ar.getPlaceholder=function(){return aw};ar.getCanvas=function(){return aA};ar.getPlotOffset=function(){return r};ar.width=function(){return h};ar.height=function(){return x};ar.offset=function(){var aC=z.offset();aC.left+=r.left;aC.top+=r.top;return aC};ar.getData=function(){return R};ar.getAxes=function(){var aD={},aC;c.each(q.concat(ax),function(aE,aF){if(aF){aD[aF.direction+(aF.n!=1?aF.n:"")+"axis"]=aF}});return aD};ar.getXAxes=function(){return q};ar.getYAxes=function(){return ax};ar.c2p=D;ar.p2c=at;ar.getOptions=function(){return P};ar.highlight=y;ar.unhighlight=U;ar.triggerRedrawOverlay=f;ar.pointOffset=function(aC){return{left:parseInt(q[aB(aC,"x")-1].p2c(+aC.x)+r.left),top:parseInt(ax[aB(aC,"y")-1].p2c(+aC.y)+r.top)}};ar.shutdown=ah;ar.resize=function(){C();g(aA);g(ae)};ar.hooks=al;G(ar);aa(K);Y();ak(aj);u();X();ai();function ao(aE,aC){aC=[ar].concat(aC);for(var aD=0;aD<aE.length;++aD){aE[aD].apply(this,aC)}}function G(){for(var aC=0;aC<ag.length;++aC){var aD=ag[aC];aD.init(ar);if(aD.options){c.extend(true,P,aD.options)}}}function aa(aD){var 
aC;c.extend(true,P,aD);if(P.xaxis.color==null){P.xaxis.color=P.grid.color}if(P.yaxis.color==null){P.yaxis.color=P.grid.color}if(P.xaxis.tickColor==null){P.xaxis.tickColor=P.grid.tickColor}if(P.yaxis.tickColor==null){P.yaxis.tickColor=P.grid.tickColor}if(P.grid.borderColor==null){P.grid.borderColor=P.grid.color}if(P.grid.tickColor==null){P.grid.tickColor=c.color.parse(P.grid.color).scale("a",0.22).toString()}for(aC=0;aC<Math.max(1,P.xaxes.length);++aC){P.xaxes[aC]=c.extend(true,{},P.xaxis,P.xaxes[aC])}for(aC=0;aC<Math.max(1,P.yaxes.length);++aC){P.yaxes[aC]=c.extend(true,{},P.yaxis,P.yaxes[aC])}if(P.xaxis.noTicks&&P.xaxis.ticks==null){P.xaxis.ticks=P.xaxis.noTicks}if(P.yaxis.noTicks&&P.yaxis.ticks==null){P.yaxis.ticks=P.yaxis.noTicks}if(P.x2axis){P.xaxes[1]=c.extend(true,{},P.xaxis,P.x2axis);P.xaxes[1].position="top"}if(P.y2axis){P.yaxes[1]=c.extend(true,{},P.yaxis,P.y2axis);P.yaxes[1].position="right"}if(P.grid.coloredAreas){P.grid.markings=P.grid.coloredAreas}if(P.grid.coloredAreasColor){P.grid.markingsColor=P.grid.coloredAreasColor}if(P.lines){c.extend(true,P.series.lines,P.lines)}if(P.points){c.extend(true,P.series.points,P.points)}if(P.bars){c.extend(true,P.series.bars,P.bars)}if(P.shadowSize!=null){P.series.shadowSize=P.shadowSize}for(aC=0;aC<P.xaxes.length;++aC){W(q,aC+1).options=P.xaxes[aC]}for(aC=0;aC<P.yaxes.length;++aC){W(ax,aC+1).options=P.yaxes[aC]}for(var aE in al){if(P.hooks[aE]&&P.hooks[aE].length){al[aE]=al[aE].concat(P.hooks[aE])}}ao(al.processOptions,[P])}function ak(aC){R=Z(aC);ay();A()}function Z(aF){var aD=[];for(var aC=0;aC<aF.length;++aC){var aE=c.extend(true,{},P.series);if(aF[aC].data!=null){aE.data=aF[aC].data;delete aF[aC].data;c.extend(true,aE,aF[aC]);aF[aC].data=aE.data}else{aE.data=aF[aC]}aD.push(aE)}return aD}function aB(aD,aE){var aC=aD[aE+"axis"];if(typeof aC=="object"){aC=aC.n}if(typeof aC!="number"){aC=1}return aC}function n(){return c.grep(q.concat(ax),function(aC){return aC})}function D(aF){var aD={},aC,aE;for(aC=0;aC<q.length;++aC){aE=q[aC];if(aE&&aE.used){aD["x"+aE.n]=aE.c2p(aF.left)}}for(aC=0;aC<ax.length;++aC){aE=ax[aC];if(aE&&aE.used){aD["y"+aE.n]=aE.c2p(aF.top)}}if(aD.x1!==undefined){aD.x=aD.x1}if(aD.y1!==undefined){aD.y=aD.y1}return aD}function at(aG){var aE={},aD,aF,aC;for(aD=0;aD<q.length;++aD){aF=q[aD];if(aF&&aF.used){aC="x"+aF.n;if(aG[aC]==null&&aF.n==1){aC="x"}if(aG[aC]!=null){aE.left=aF.p2c(aG[aC]);break}}}for(aD=0;aD<ax.length;++aD){aF=ax[aD];if(aF&&aF.used){aC="y"+aF.n;if(aG[aC]==null&&aF.n==1){aC="y"}if(aG[aC]!=null){aE.top=aF.p2c(aG[aC]);break}}}return aE}function W(aD,aC){if(!aD[aC-1]){aD[aC-1]={n:aC,direction:aD==q?"x":"y",options:c.extend(true,{},aD==q?P.xaxis:P.yaxis)}}return aD[aC-1]}function ay(){var aH;var aN=R.length,aC=[],aF=[];for(aH=0;aH<R.length;++aH){var aK=R[aH].color;if(aK!=null){--aN;if(typeof aK=="number"){aF.push(aK)}else{aC.push(c.color.parse(R[aH].color))}}}for(aH=0;aH<aF.length;++aH){aN=Math.max(aN,aF[aH]+1)}var aD=[],aG=0;aH=0;while(aD.length<aN){var aJ;if(P.colors.length==aH){aJ=c.color.make(100,100,100)}else{aJ=c.color.parse(P.colors[aH])}var aE=aG%2==1?-1:1;aJ.scale("rgb",1+aE*Math.ceil(aG/2)*0.2);aD.push(aJ);++aH;if(aH>=P.colors.length){aH=0;++aG}}var aI=0,aO;for(aH=0;aH<R.length;++aH){aO=R[aH];if(aO.color==null){aO.color=aD[aI].toString();++aI}else{if(typeof aO.color=="number"){aO.color=aD[aO.color].toString()}}if(aO.lines.show==null){var aM,aL=true;for(aM in aO){if(aO[aM]&&aO[aM].show){aL=false;break}}if(aL){aO.lines.show=true}}aO.xaxis=W(q,aB(aO,"x"));aO.yaxis=W(ax,aB(aO,"y"))}}function A(){var 
aP=Number.POSITIVE_INFINITY,aJ=Number.NEGATIVE_INFINITY,aC=Number.MAX_VALUE,aV,aT,aS,aO,aE,aK,aU,aQ,aI,aH,aD,a1,aY,aM;function aG(a4,a3,a2){if(a3<a4.datamin&&a3!=-aC){a4.datamin=a3}if(a2>a4.datamax&&a2!=aC){a4.datamax=a2}}c.each(n(),function(a2,a3){a3.datamin=aP;a3.datamax=aJ;a3.used=false});for(aV=0;aV<R.length;++aV){aK=R[aV];aK.datapoints={points:[]};ao(al.processRawData,[aK,aK.data,aK.datapoints])}for(aV=0;aV<R.length;++aV){aK=R[aV];var a0=aK.data,aX=aK.datapoints.format;if(!aX){aX=[];aX.push({x:true,number:true,required:true});aX.push({y:true,number:true,required:true});if(aK.bars.show||(aK.lines.show&&aK.lines.fill)){aX.push({y:true,number:true,required:false,defaultValue:0});if(aK.bars.horizontal){delete aX[aX.length-1].y;aX[aX.length-1].x=true}}aK.datapoints.format=aX}if(aK.datapoints.pointsize!=null){continue}aK.datapoints.pointsize=aX.length;aQ=aK.datapoints.pointsize;aU=aK.datapoints.points;insertSteps=aK.lines.show&&aK.lines.steps;aK.xaxis.used=aK.yaxis.used=true;for(aT=aS=0;aT<a0.length;++aT,aS+=aQ){aM=a0[aT];var aF=aM==null;if(!aF){for(aO=0;aO<aQ;++aO){a1=aM[aO];aY=aX[aO];if(aY){if(aY.number&&a1!=null){a1=+a1;if(isNaN(a1)){a1=null}else{if(a1==Infinity){a1=aC}else{if(a1==-Infinity){a1=-aC}}}}if(a1==null){if(aY.required){aF=true}if(aY.defaultValue!=null){a1=aY.defaultValue}}}aU[aS+aO]=a1}}if(aF){for(aO=0;aO<aQ;++aO){a1=aU[aS+aO];if(a1!=null){aY=aX[aO];if(aY.x){aG(aK.xaxis,a1,a1)}if(aY.y){aG(aK.yaxis,a1,a1)}}aU[aS+aO]=null}}else{if(insertSteps&&aS>0&&aU[aS-aQ]!=null&&aU[aS-aQ]!=aU[aS]&&aU[aS-aQ+1]!=aU[aS+1]){for(aO=0;aO<aQ;++aO){aU[aS+aQ+aO]=aU[aS+aO]}aU[aS+1]=aU[aS-aQ+1];aS+=aQ}}}}for(aV=0;aV<R.length;++aV){aK=R[aV];ao(al.processDatapoints,[aK,aK.datapoints])}for(aV=0;aV<R.length;++aV){aK=R[aV];aU=aK.datapoints.points,aQ=aK.datapoints.pointsize;var aL=aP,aR=aP,aN=aJ,aW=aJ;for(aT=0;aT<aU.length;aT+=aQ){if(aU[aT]==null){continue}for(aO=0;aO<aQ;++aO){a1=aU[aT+aO];aY=aX[aO];if(!aY||a1==aC||a1==-aC){continue}if(aY.x){if(a1<aL){aL=a1}if(a1>aN){aN=a1}}if(aY.y){if(a1<aR){aR=a1}if(a1>aW){aW=a1}}}}if(aK.bars.show){var aZ=aK.bars.align=="left"?0:-aK.bars.barWidth/2;if(aK.bars.horizontal){aR+=aZ;aW+=aZ+aK.bars.barWidth}else{aL+=aZ;aN+=aZ+aK.bars.barWidth}}aG(aK.xaxis,aL,aN);aG(aK.yaxis,aR,aW)}c.each(n(),function(a2,a3){if(a3.datamin==aP){a3.datamin=null}if(a3.datamax==aJ){a3.datamax=null}})}function j(aC,aD){var aE=document.createElement("canvas");aE.className=aD;aE.width=H;aE.height=J;if(!aC){c(aE).css({position:"absolute",left:0,top:0})}c(aE).appendTo(aw);if(!aE.getContext){aE=window.G_vmlCanvasManager.initElement(aE)}aE.getContext("2d").save();return aE}function C(){H=aw.width();J=aw.height();if(H<=0||J<=0){throw"Invalid dimensions for plot, width = "+H+", height = "+J}}function g(aD){if(aD.width!=H){aD.width=H}if(aD.height!=J){aD.height=J}var aC=aD.getContext("2d");aC.restore();aC.save()}function Y(){var aD,aC=aw.children("canvas.base"),aE=aw.children("canvas.overlay");if(aC.length==0||aE==0){aw.html("");aw.css({padding:0});if(aw.css("position")=="static"){aw.css("position","relative")}C();aA=j(true,"base");ae=j(false,"overlay");aD=false}else{aA=aC.get(0);ae=aE.get(0);aD=true}I=aA.getContext("2d");B=ae.getContext("2d");z=c([ae,aA]);if(aD){aw.data("plot").shutdown();ar.resize();B.clearRect(0,0,H,J);z.unbind();aw.children().not([aA,ae]).remove()}aw.data("plot",ar)}function ai(){if(P.grid.hoverable){z.mousemove(ab);z.mouseleave(l)}if(P.grid.clickable){z.click(S)}ao(al.bindEvents,[z])}function 
ah(){if(N){clearTimeout(N)}z.unbind("mousemove",ab);z.unbind("mouseleave",l);z.unbind("click",S);ao(al.shutdown,[z])}function s(aH){function aD(aI){return aI}var aG,aC,aE=aH.options.transform||aD,aF=aH.options.inverseTransform;if(aH.direction=="x"){aG=aH.scale=h/Math.abs(aE(aH.max)-aE(aH.min));aC=Math.min(aE(aH.max),aE(aH.min))}else{aG=aH.scale=x/Math.abs(aE(aH.max)-aE(aH.min));aG=-aG;aC=Math.max(aE(aH.max),aE(aH.min))}if(aE==aD){aH.p2c=function(aI){return(aI-aC)*aG}}else{aH.p2c=function(aI){return(aE(aI)-aC)*aG}}if(!aF){aH.c2p=function(aI){return aC+aI/aG}}else{aH.c2p=function(aI){return aF(aC+aI/aG)}}}function M(aE){var aC=aE.options,aG,aK=aE.ticks||[],aJ=[],aF,aL=aC.labelWidth,aH=aC.labelHeight,aD;function aI(aN,aM){return c(\'<div style="position:absolute;top:-10000px;\'+aM+\'font-size:smaller"><div class="\'+aE.direction+"Axis "+aE.direction+aE.n+\'Axis">\'+aN.join("")+"</div></div>").appendTo(aw)}if(aE.direction=="x"){if(aL==null){aL=Math.floor(H/(aK.length>0?aK.length:1))}if(aH==null){aJ=[];for(aG=0;aG<aK.length;++aG){aF=aK[aG].label;if(aF){aJ.push(\'<div class="tickLabel" style="float:left;width:\'+aL+\'px">\'+aF+"</div>")}}if(aJ.length>0){aJ.push(\'<div style="clear:left"></div>\');aD=aI(aJ,"width:10000px;");aH=aD.height();aD.remove()}}}else{if(aL==null||aH==null){for(aG=0;aG<aK.length;++aG){aF=aK[aG].label;if(aF){aJ.push(\'<div class="tickLabel">\'+aF+"</div>")}}if(aJ.length>0){aD=aI(aJ,"");if(aL==null){aL=aD.children().width()}if(aH==null){aH=aD.find("div.tickLabel").height()}aD.remove()}}}if(aL==null){aL=0}if(aH==null){aH=0}aE.labelWidth=aL;aE.labelHeight=aH}function av(aE){var aD=aE.labelWidth,aM=aE.labelHeight,aI=aE.options.position,aG=aE.options.tickLength,aH=P.grid.axisMargin,aK=P.grid.labelMargin,aL=aE.direction=="x"?q:ax,aF;var aC=c.grep(aL,function(aO){return aO&&aO.options.position==aI&&aO.reserveSpace});if(c.inArray(aE,aC)==aC.length-1){aH=0}if(aG==null){aG="full"}var aJ=c.grep(aL,function(aO){return aO&&aO.reserveSpace});var aN=c.inArray(aE,aJ)==0;if(!aN&&aG=="full"){aG=5}if(!isNaN(+aG)){aK+=+aG}if(aE.direction=="x"){aM+=aK;if(aI=="bottom"){r.bottom+=aM+aH;aE.box={top:J-r.bottom,height:aM}}else{aE.box={top:r.top+aH,height:aM};r.top+=aM+aH}}else{aD+=aK;if(aI=="left"){aE.box={left:r.left+aH,width:aD};r.left+=aD+aH}else{r.right+=aD+aH;aE.box={left:H-r.right,width:aD}}}aE.position=aI;aE.tickLength=aG;aE.box.padding=aK;aE.innermost=aN}function V(aC){if(aC.direction=="x"){aC.box.left=r.left;aC.box.width=h}else{aC.box.top=r.top;aC.box.height=x}}function u(){var aD,aF=n();c.each(aF,function(aG,aH){aH.show=aH.options.show;if(aH.show==null){aH.show=aH.used}aH.reserveSpace=aH.show||aH.options.reserveSpace;o(aH)});allocatedAxes=c.grep(aF,function(aG){return aG.reserveSpace});r.left=r.right=r.top=r.bottom=0;if(P.grid.show){c.each(allocatedAxes,function(aG,aH){T(aH);Q(aH);aq(aH,aH.ticks);M(aH)});for(aD=allocatedAxes.length-1;aD>=0;--aD){av(allocatedAxes[aD])}var aE=P.grid.minBorderMargin;if(aE==null){aE=0;for(aD=0;aD<R.length;++aD){aE=Math.max(aE,R[aD].points.radius+R[aD].points.lineWidth/2)}}for(var aC in r){r[aC]+=P.grid.borderWidth;r[aC]=Math.max(aE,r[aC])}}h=H-r.left-r.right;x=J-r.bottom-r.top;c.each(aF,function(aG,aH){s(aH)});if(P.grid.show){c.each(allocatedAxes,function(aG,aH){V(aH)});k()}p()}function o(aF){var aG=aF.options,aE=+(aG.min!=null?aG.min:aF.datamin),aC=+(aG.max!=null?aG.max:aF.datamax),aI=aC-aE;if(aI==0){var aD=aC==0?1:0.01;if(aG.min==null){aE-=aD}if(aG.max==null||aG.min!=null){aC+=aD}}else{var 
aH=aG.autoscaleMargin;if(aH!=null){if(aG.min==null){aE-=aI*aH;if(aE<0&&aF.datamin!=null&&aF.datamin>=0){aE=0}}if(aG.max==null){aC+=aI*aH;if(aC>0&&aF.datamax!=null&&aF.datamax<=0){aC=0}}}}aF.min=aE;aF.max=aC}function T(aH){var aN=aH.options;var aI;if(typeof aN.ticks=="number"&&aN.ticks>0){aI=aN.ticks}else{aI=0.3*Math.sqrt(aH.direction=="x"?H:J)}var aU=(aH.max-aH.min)/aI,aP,aC,aO,aS,aT,aR,aJ;if(aN.mode=="time"){var aK={second:1000,minute:60*1000,hour:60*60*1000,day:24*60*60*1000,month:30*24*60*60*1000,year:365.2425*24*60*60*1000};var aL=[[1,"second"],[2,"second"],[5,"second"],[10,"second"],[30,"second"],[1,"minute"],[2,"minute"],[5,"minute"],[10,"minute"],[30,"minute"],[1,"hour"],[2,"hour"],[4,"hour"],[8,"hour"],[12,"hour"],[1,"day"],[2,"day"],[3,"day"],[0.25,"month"],[0.5,"month"],[1,"month"],[2,"month"],[3,"month"],[6,"month"],[1,"year"]];var aD=0;if(aN.minTickSize!=null){if(typeof aN.tickSize=="number"){aD=aN.tickSize}else{aD=aN.minTickSize[0]*aK[aN.minTickSize[1]]}}for(var aT=0;aT<aL.length-1;++aT){if(aU<(aL[aT][0]*aK[aL[aT][1]]+aL[aT+1][0]*aK[aL[aT+1][1]])/2&&aL[aT][0]*aK[aL[aT][1]]>=aD){break}}aP=aL[aT][0];aO=aL[aT][1];if(aO=="year"){aR=Math.pow(10,Math.floor(Math.log(aU/aK.year)/Math.LN10));aJ=(aU/aK.year)/aR;if(aJ<1.5){aP=1}else{if(aJ<3){aP=2}else{if(aJ<7.5){aP=5}else{aP=10}}}aP*=aR}aH.tickSize=aN.tickSize||[aP,aO];aC=function(aY){var a3=[],a1=aY.tickSize[0],a4=aY.tickSize[1],a2=new Date(aY.min);var aX=a1*aK[a4];if(a4=="second"){a2.setUTCSeconds(a(a2.getUTCSeconds(),a1))}if(a4=="minute"){a2.setUTCMinutes(a(a2.getUTCMinutes(),a1))}if(a4=="hour"){a2.setUTCHours(a(a2.getUTCHours(),a1))}if(a4=="month"){a2.setUTCMonth(a(a2.getUTCMonth(),a1))}if(a4=="year"){a2.setUTCFullYear(a(a2.getUTCFullYear(),a1))}a2.setUTCMilliseconds(0);if(aX>=aK.minute){a2.setUTCSeconds(0)}if(aX>=aK.hour){a2.setUTCMinutes(0)}if(aX>=aK.day){a2.setUTCHours(0)}if(aX>=aK.day*4){a2.setUTCDate(1)}if(aX>=aK.year){a2.setUTCMonth(0)}var a6=0,a5=Number.NaN,aZ;do{aZ=a5;a5=a2.getTime();a3.push(a5);if(a4=="month"){if(a1<1){a2.setUTCDate(1);var aW=a2.getTime();a2.setUTCMonth(a2.getUTCMonth()+1);var a0=a2.getTime();a2.setTime(a5+a6*aK.hour+(a0-aW)*a1);a6=a2.getUTCHours();a2.setUTCHours(0)}else{a2.setUTCMonth(a2.getUTCMonth()+a1)}}else{if(a4=="year"){a2.setUTCFullYear(a2.getUTCFullYear()+a1)}else{a2.setTime(a5+aX)}}}while(a5<aY.max&&a5!=aZ);return a3};aS=function(aW,aZ){var a1=new Date(aW);if(aN.timeformat!=null){return c.plot.formatDate(a1,aN.timeformat,aN.monthNames)}var aX=aZ.tickSize[0]*aK[aZ.tickSize[1]];var aY=aZ.max-aZ.min;var a0=(aN.twelveHourClock)?" 
%p":"";if(aX<aK.minute){fmt="%h:%M:%S"+a0}else{if(aX<aK.day){if(aY<2*aK.day){fmt="%h:%M"+a0}else{fmt="%b %d %h:%M"+a0}}else{if(aX<aK.month){fmt="%b %d"}else{if(aX<aK.year){if(aY<aK.year){fmt="%b"}else{fmt="%b %y"}}else{fmt="%y"}}}}return c.plot.formatDate(a1,fmt,aN.monthNames)}}else{var aV=aN.tickDecimals;var aQ=-Math.floor(Math.log(aU)/Math.LN10);if(aV!=null&&aQ>aV){aQ=aV}aR=Math.pow(10,-aQ);aJ=aU/aR;if(aJ<1.5){aP=1}else{if(aJ<3){aP=2;if(aJ>2.25&&(aV==null||aQ+1<=aV)){aP=2.5;++aQ}}else{if(aJ<7.5){aP=5}else{aP=10}}}aP*=aR;if(aN.minTickSize!=null&&aP<aN.minTickSize){aP=aN.minTickSize}aH.tickDecimals=Math.max(0,aV!=null?aV:aQ);aH.tickSize=aN.tickSize||aP;aC=function(aY){var a0=[];var a1=a(aY.min,aY.tickSize),aX=0,aW=Number.NaN,aZ;do{aZ=aW;aW=a1+aX*aY.tickSize;a0.push(aW);++aX}while(aW<aY.max&&aW!=aZ);return a0};aS=function(aW,aX){return aW.toFixed(aX.tickDecimals)}}if(aN.alignTicksWithAxis!=null){var aG=(aH.direction=="x"?q:ax)[aN.alignTicksWithAxis-1];if(aG&&aG.used&&aG!=aH){var aM=aC(aH);if(aM.length>0){if(aN.min==null){aH.min=Math.min(aH.min,aM[0])}if(aN.max==null&&aM.length>1){aH.max=Math.max(aH.max,aM[aM.length-1])}}aC=function(aY){var aZ=[],aW,aX;for(aX=0;aX<aG.ticks.length;++aX){aW=(aG.ticks[aX].v-aG.min)/(aG.max-aG.min);aW=aY.min+aW*(aY.max-aY.min);aZ.push(aW)}return aZ};if(aH.mode!="time"&&aN.tickDecimals==null){var aF=Math.max(0,-Math.floor(Math.log(aU)/Math.LN10)+1),aE=aC(aH);if(!(aE.length>1&&/\\..*0$/.test((aE[1]-aE[0]).toFixed(aF)))){aH.tickDecimals=aF}}}}aH.tickGenerator=aC;if(c.isFunction(aN.tickFormatter)){aH.tickFormatter=function(aW,aX){return""+aN.tickFormatter(aW,aX)}}else{aH.tickFormatter=aS}}function Q(aG){var aI=aG.options.ticks,aH=[];if(aI==null||(typeof aI=="number"&&aI>0)){aH=aG.tickGenerator(aG)}else{if(aI){if(c.isFunction(aI)){aH=aI({min:aG.min,max:aG.max})}else{aH=aI}}}var aF,aC;aG.ticks=[];for(aF=0;aF<aH.length;++aF){var aD=null;var aE=aH[aF];if(typeof aE=="object"){aC=+aE[0];if(aE.length>1){aD=aE[1]}}else{aC=+aE}if(aD==null){aD=aG.tickFormatter(aC,aG)}if(!isNaN(aC)){aG.ticks.push({v:aC,label:aD})}}}function aq(aC,aD){if(aC.options.autoscaleMargin&&aD.length>0){if(aC.options.min==null){aC.min=Math.min(aC.min,aD[0].v)}if(aC.options.max==null&&aD.length>1){aC.max=Math.max(aC.max,aD[aD.length-1].v)}}}function X(){I.clearRect(0,0,H,J);var aD=P.grid;if(aD.show&&aD.backgroundColor){O()}if(aD.show&&!aD.aboveData){ad()}for(var aC=0;aC<R.length;++aC){ao(al.drawSeries,[I,R[aC]]);d(R[aC])}ao(al.draw,[I]);if(aD.show&&aD.aboveData){ad()}}function E(aC,aJ){var aF,aI,aH,aE,aG=n();for(i=0;i<aG.length;++i){aF=aG[i];if(aF.direction==aJ){aE=aJ+aF.n+"axis";if(!aC[aE]&&aF.n==1){aE=aJ+"axis"}if(aC[aE]){aI=aC[aE].from;aH=aC[aE].to;break}}}if(!aC[aE]){aF=aJ=="x"?q[0]:ax[0];aI=aC[aJ+"1"];aH=aC[aJ+"2"]}if(aI!=null&&aH!=null&&aI>aH){var aD=aI;aI=aH;aH=aD}return{from:aI,to:aH,axis:aF}}function O(){I.save();I.translate(r.left,r.top);I.fillStyle=an(P.grid.backgroundColor,x,0,"rgba(255, 255, 255, 0)");I.fillRect(0,0,h,x);I.restore()}function ad(){var aG;I.save();I.translate(r.left,r.top);var aI=P.grid.markings;if(aI){if(c.isFunction(aI)){var aL=ar.getAxes();aL.xmin=aL.xaxis.min;aL.xmax=aL.xaxis.max;aL.ymin=aL.yaxis.min;aL.ymax=aL.yaxis.max;aI=aI(aL)}for(aG=0;aG<aI.length;++aG){var 
aE=aI[aG],aD=E(aE,"x"),aJ=E(aE,"y");if(aD.from==null){aD.from=aD.axis.min}if(aD.to==null){aD.to=aD.axis.max}if(aJ.from==null){aJ.from=aJ.axis.min}if(aJ.to==null){aJ.to=aJ.axis.max}if(aD.to<aD.axis.min||aD.from>aD.axis.max||aJ.to<aJ.axis.min||aJ.from>aJ.axis.max){continue}aD.from=Math.max(aD.from,aD.axis.min);aD.to=Math.min(aD.to,aD.axis.max);aJ.from=Math.max(aJ.from,aJ.axis.min);aJ.to=Math.min(aJ.to,aJ.axis.max);if(aD.from==aD.to&&aJ.from==aJ.to){continue}aD.from=aD.axis.p2c(aD.from);aD.to=aD.axis.p2c(aD.to);aJ.from=aJ.axis.p2c(aJ.from);aJ.to=aJ.axis.p2c(aJ.to);if(aD.from==aD.to||aJ.from==aJ.to){I.beginPath();I.strokeStyle=aE.color||P.grid.markingsColor;I.lineWidth=aE.lineWidth||P.grid.markingsLineWidth;I.moveTo(aD.from,aJ.from);I.lineTo(aD.to,aJ.to);I.stroke()}else{I.fillStyle=aE.color||P.grid.markingsColor;I.fillRect(aD.from,aJ.to,aD.to-aD.from,aJ.from-aJ.to)}}}var aL=n(),aN=P.grid.borderWidth;for(var aF=0;aF<aL.length;++aF){var aC=aL[aF],aH=aC.box,aR=aC.tickLength,aO,aM,aQ,aK;if(!aC.show||aC.ticks.length==0){continue}I.strokeStyle=aC.options.tickColor||c.color.parse(aC.options.color).scale("a",0.22).toString();I.lineWidth=1;if(aC.direction=="x"){aO=0;if(aR=="full"){aM=(aC.position=="top"?0:x)}else{aM=aH.top-r.top+(aC.position=="top"?aH.height:0)}}else{aM=0;if(aR=="full"){aO=(aC.position=="left"?0:h)}else{aO=aH.left-r.left+(aC.position=="left"?aH.width:0)}}if(!aC.innermost){I.beginPath();aQ=aK=0;if(aC.direction=="x"){aQ=h}else{aK=x}if(I.lineWidth==1){aO=Math.floor(aO)+0.5;aM=Math.floor(aM)+0.5}I.moveTo(aO,aM);I.lineTo(aO+aQ,aM+aK);I.stroke()}I.beginPath();for(aG=0;aG<aC.ticks.length;++aG){var aP=aC.ticks[aG].v;aQ=aK=0;if(aP<aC.min||aP>aC.max||(aR=="full"&&aN>0&&(aP==aC.min||aP==aC.max))){continue}if(aC.direction=="x"){aO=aC.p2c(aP);aK=aR=="full"?-x:aR;if(aC.position=="top"){aK=-aK}}else{aM=aC.p2c(aP);aQ=aR=="full"?-h:aR;if(aC.position=="left"){aQ=-aQ}}if(I.lineWidth==1){if(aC.direction=="x"){aO=Math.floor(aO)+0.5}else{aM=Math.floor(aM)+0.5}}I.moveTo(aO,aM);I.lineTo(aO+aQ,aM+aK)}I.stroke()}if(aN){I.lineWidth=aN;I.strokeStyle=P.grid.borderColor;I.strokeRect(-aN/2,-aN/2,h+aN,x+aN)}I.restore()}function k(){aw.find(".tickLabels").remove();var aH=[\'<div class="tickLabels" style="font-size:smaller">\'];var aK=n();for(var aE=0;aE<aK.length;++aE){var aD=aK[aE],aG=aD.box;if(!aD.show){continue}aH.push(\'<div class="\'+aD.direction+"Axis "+aD.direction+aD.n+\'Axis" style="color:\'+aD.options.color+\'">\');for(var aF=0;aF<aD.ticks.length;++aF){var aI=aD.ticks[aF];if(!aI.label||aI.v<aD.min||aI.v>aD.max){continue}var aL={},aJ;if(aD.direction=="x"){aJ="center";aL.left=Math.round(r.left+aD.p2c(aI.v)-aD.labelWidth/2);if(aD.position=="bottom"){aL.top=aG.top+aG.padding}else{aL.bottom=J-(aG.top+aG.height-aG.padding)}}else{aL.top=Math.round(r.top+aD.p2c(aI.v)-aD.labelHeight/2);if(aD.position=="left"){aL.right=H-(aG.left+aG.width-aG.padding);aJ="right"}else{aL.left=aG.left+aG.padding;aJ="left"}}aL.width=aD.labelWidth;var aC=["position:absolute","text-align:"+aJ];for(var aM in aL){aC.push(aM+":"+aL[aM]+"px")}aH.push(\'<div class="tickLabel" style="\'+aC.join(";")+\'">\'+aI.label+"</div>")}aH.push("</div>")}aH.push("</div>");aw.append(aH.join(""))}function d(aC){if(aC.lines.show){au(aC)}if(aC.bars.show){e(aC)}if(aC.points.show){ap(aC)}}function au(aF){function aE(aQ,aR,aJ,aV,aU){var aW=aQ.points,aK=aQ.pointsize,aO=null,aN=null;I.beginPath();for(var aP=aK;aP<aW.length;aP+=aK){var 
aM=aW[aP-aK],aT=aW[aP-aK+1],aL=aW[aP],aS=aW[aP+1];if(aM==null||aL==null){continue}if(aT<=aS&&aT<aU.min){if(aS<aU.min){continue}aM=(aU.min-aT)/(aS-aT)*(aL-aM)+aM;aT=aU.min}else{if(aS<=aT&&aS<aU.min){if(aT<aU.min){continue}aL=(aU.min-aT)/(aS-aT)*(aL-aM)+aM;aS=aU.min}}if(aT>=aS&&aT>aU.max){if(aS>aU.max){continue}aM=(aU.max-aT)/(aS-aT)*(aL-aM)+aM;aT=aU.max}else{if(aS>=aT&&aS>aU.max){if(aT>aU.max){continue}aL=(aU.max-aT)/(aS-aT)*(aL-aM)+aM;aS=aU.max}}if(aM<=aL&&aM<aV.min){if(aL<aV.min){continue}aT=(aV.min-aM)/(aL-aM)*(aS-aT)+aT;aM=aV.min}else{if(aL<=aM&&aL<aV.min){if(aM<aV.min){continue}aS=(aV.min-aM)/(aL-aM)*(aS-aT)+aT;aL=aV.min}}if(aM>=aL&&aM>aV.max){if(aL>aV.max){continue}aT=(aV.max-aM)/(aL-aM)*(aS-aT)+aT;aM=aV.max}else{if(aL>=aM&&aL>aV.max){if(aM>aV.max){continue}aS=(aV.max-aM)/(aL-aM)*(aS-aT)+aT;aL=aV.max}}if(aM!=aO||aT!=aN){I.moveTo(aV.p2c(aM)+aR,aU.p2c(aT)+aJ)}aO=aL;aN=aS;I.lineTo(aV.p2c(aL)+aR,aU.p2c(aS)+aJ)}I.stroke()}function aG(aJ,aR,aQ){var aX=aJ.points,aW=aJ.pointsize,aO=Math.min(Math.max(0,aQ.min),aQ.max),aY=0,aV,aU=false,aN=1,aM=0,aS=0;while(true){if(aW>0&&aY>aX.length+aW){break}aY+=aW;var a0=aX[aY-aW],aL=aX[aY-aW+aN],aZ=aX[aY],aK=aX[aY+aN];if(aU){if(aW>0&&a0!=null&&aZ==null){aS=aY;aW=-aW;aN=2;continue}if(aW<0&&aY==aM+aW){I.fill();aU=false;aW=-aW;aN=1;aY=aM=aS+aW;continue}}if(a0==null||aZ==null){continue}if(a0<=aZ&&a0<aR.min){if(aZ<aR.min){continue}aL=(aR.min-a0)/(aZ-a0)*(aK-aL)+aL;a0=aR.min}else{if(aZ<=a0&&aZ<aR.min){if(a0<aR.min){continue}aK=(aR.min-a0)/(aZ-a0)*(aK-aL)+aL;aZ=aR.min}}if(a0>=aZ&&a0>aR.max){if(aZ>aR.max){continue}aL=(aR.max-a0)/(aZ-a0)*(aK-aL)+aL;a0=aR.max}else{if(aZ>=a0&&aZ>aR.max){if(a0>aR.max){continue}aK=(aR.max-a0)/(aZ-a0)*(aK-aL)+aL;aZ=aR.max}}if(!aU){I.beginPath();I.moveTo(aR.p2c(a0),aQ.p2c(aO));aU=true}if(aL>=aQ.max&&aK>=aQ.max){I.lineTo(aR.p2c(a0),aQ.p2c(aQ.max));I.lineTo(aR.p2c(aZ),aQ.p2c(aQ.max));continue}else{if(aL<=aQ.min&&aK<=aQ.min){I.lineTo(aR.p2c(a0),aQ.p2c(aQ.min));I.lineTo(aR.p2c(aZ),aQ.p2c(aQ.min));continue}}var aP=a0,aT=aZ;if(aL<=aK&&aL<aQ.min&&aK>=aQ.min){a0=(aQ.min-aL)/(aK-aL)*(aZ-a0)+a0;aL=aQ.min}else{if(aK<=aL&&aK<aQ.min&&aL>=aQ.min){aZ=(aQ.min-aL)/(aK-aL)*(aZ-a0)+a0;aK=aQ.min}}if(aL>=aK&&aL>aQ.max&&aK<=aQ.max){a0=(aQ.max-aL)/(aK-aL)*(aZ-a0)+a0;aL=aQ.max}else{if(aK>=aL&&aK>aQ.max&&aL<=aQ.max){aZ=(aQ.max-aL)/(aK-aL)*(aZ-a0)+a0;aK=aQ.max}}if(a0!=aP){I.lineTo(aR.p2c(aP),aQ.p2c(aL))}I.lineTo(aR.p2c(a0),aQ.p2c(aL));I.lineTo(aR.p2c(aZ),aQ.p2c(aK));if(aZ!=aT){I.lineTo(aR.p2c(aZ),aQ.p2c(aK));I.lineTo(aR.p2c(aT),aQ.p2c(aK))}}}I.save();I.translate(r.left,r.top);I.lineJoin="round";var aH=aF.lines.lineWidth,aC=aF.shadowSize;if(aH>0&&aC>0){I.lineWidth=aC;I.strokeStyle="rgba(0,0,0,0.1)";var aI=Math.PI/18;aE(aF.datapoints,Math.sin(aI)*(aH/2+aC/2),Math.cos(aI)*(aH/2+aC/2),aF.xaxis,aF.yaxis);I.lineWidth=aC/2;aE(aF.datapoints,Math.sin(aI)*(aH/2+aC/4),Math.cos(aI)*(aH/2+aC/4),aF.xaxis,aF.yaxis)}I.lineWidth=aH;I.strokeStyle=aF.color;var aD=af(aF.lines,aF.color,0,x);if(aD){I.fillStyle=aD;aG(aF.datapoints,aF.xaxis,aF.yaxis)}if(aH>0){aE(aF.datapoints,0,0,aF.xaxis,aF.yaxis)}I.restore()}function ap(aF){function aI(aO,aN,aV,aL,aT,aU,aR,aK){var aS=aO.points,aJ=aO.pointsize;for(var aM=0;aM<aS.length;aM+=aJ){var aQ=aS[aM],aP=aS[aM+1];if(aQ==null||aQ<aU.min||aQ>aU.max||aP<aR.min||aP>aR.max){continue}I.beginPath();aQ=aU.p2c(aQ);aP=aR.p2c(aP)+aL;if(aK=="circle"){I.arc(aQ,aP,aN,0,aT?Math.PI:Math.PI*2,false)}else{aK(I,aQ,aP,aN,aT)}I.closePath();if(aV){I.fillStyle=aV;I.fill()}I.stroke()}}I.save();I.translate(r.left,r.top);var 
aH=aF.points.lineWidth,aD=aF.shadowSize,aC=aF.points.radius,aG=aF.points.symbol;if(aH>0&&aD>0){var aE=aD/2;I.lineWidth=aE;I.strokeStyle="rgba(0,0,0,0.1)";aI(aF.datapoints,aC,null,aE+aE/2,true,aF.xaxis,aF.yaxis,aG);I.strokeStyle="rgba(0,0,0,0.2)";aI(aF.datapoints,aC,null,aE/2,true,aF.xaxis,aF.yaxis,aG)}I.lineWidth=aH;I.strokeStyle=aF.color;aI(aF.datapoints,aC,af(aF.points,aF.color),0,false,aF.xaxis,aF.yaxis,aG);I.restore()}function F(aO,aN,aW,aJ,aR,aG,aE,aM,aL,aV,aS,aD){var aF,aU,aK,aQ,aH,aC,aP,aI,aT;if(aS){aI=aC=aP=true;aH=false;aF=aW;aU=aO;aQ=aN+aJ;aK=aN+aR;if(aU<aF){aT=aU;aU=aF;aF=aT;aH=true;aC=false}}else{aH=aC=aP=true;aI=false;aF=aO+aJ;aU=aO+aR;aK=aW;aQ=aN;if(aQ<aK){aT=aQ;aQ=aK;aK=aT;aI=true;aP=false}}if(aU<aM.min||aF>aM.max||aQ<aL.min||aK>aL.max){return}if(aF<aM.min){aF=aM.min;aH=false}if(aU>aM.max){aU=aM.max;aC=false}if(aK<aL.min){aK=aL.min;aI=false}if(aQ>aL.max){aQ=aL.max;aP=false}aF=aM.p2c(aF);aK=aL.p2c(aK);aU=aM.p2c(aU);aQ=aL.p2c(aQ);if(aE){aV.beginPath();aV.moveTo(aF,aK);aV.lineTo(aF,aQ);aV.lineTo(aU,aQ);aV.lineTo(aU,aK);aV.fillStyle=aE(aK,aQ);aV.fill()}if(aD>0&&(aH||aC||aP||aI)){aV.beginPath();aV.moveTo(aF,aK+aG);if(aH){aV.lineTo(aF,aQ+aG)}else{aV.moveTo(aF,aQ+aG)}if(aP){aV.lineTo(aU,aQ+aG)}else{aV.moveTo(aU,aQ+aG)}if(aC){aV.lineTo(aU,aK+aG)}else{aV.moveTo(aU,aK+aG)}if(aI){aV.lineTo(aF,aK+aG)}else{aV.moveTo(aF,aK+aG)}aV.stroke()}}function e(aE){function aD(aK,aJ,aM,aH,aL,aO,aN){var aP=aK.points,aG=aK.pointsize;for(var aI=0;aI<aP.length;aI+=aG){if(aP[aI]==null){continue}F(aP[aI],aP[aI+1],aP[aI+2],aJ,aM,aH,aL,aO,aN,I,aE.bars.horizontal,aE.bars.lineWidth)}}I.save();I.translate(r.left,r.top);I.lineWidth=aE.bars.lineWidth;I.strokeStyle=aE.color;var aC=aE.bars.align=="left"?0:-aE.bars.barWidth/2;var aF=aE.bars.fill?function(aG,aH){return af(aE.bars,aE.color,aG,aH)}:null;aD(aE.datapoints,aC,aC+aE.bars.barWidth,0,aF,aE.xaxis,aE.yaxis);I.restore()}function af(aE,aC,aD,aG){var aF=aE.fill;if(!aF){return null}if(aE.fillColor){return an(aE.fillColor,aD,aG,aC)}var aH=c.color.parse(aC);aH.a=typeof aF=="number"?aF:0.4;aH.normalize();return aH.toString()}function p(){aw.find(".legend").remove();if(!P.legend.show){return}var aI=[],aG=false,aO=P.legend.labelFormatter,aN,aK;for(var aF=0;aF<R.length;++aF){aN=R[aF];aK=aN.label;if(!aK){continue}if(aF%P.legend.noColumns==0){if(aG){aI.push("</tr>")}aI.push("<tr>");aG=true}if(aO){aK=aO(aK,aN)}aI.push(\'<td class="legendColorBox"><div style="border:1px solid \'+P.legend.labelBoxBorderColor+\';padding:1px"><div style="width:4px;height:0;border:5px solid \'+aN.color+\';overflow:hidden"></div></div></td><td class="legendLabel">\'+aK+"</td>")}if(aG){aI.push("</tr>")}if(aI.length==0){return}var aM=\'<table style="font-size:smaller;color:\'+P.grid.color+\'">\'+aI.join("")+"</table>";if(P.legend.container!=null){c(P.legend.container).html(aM)}else{var aJ="",aD=P.legend.position,aE=P.legend.margin;if(aE[0]==null){aE=[aE,aE]}if(aD.charAt(0)=="n"){aJ+="top:"+(aE[1]+r.top)+"px;"}else{if(aD.charAt(0)=="s"){aJ+="bottom:"+(aE[1]+r.bottom)+"px;"}}if(aD.charAt(1)=="e"){aJ+="right:"+(aE[0]+r.right)+"px;"}else{if(aD.charAt(1)=="w"){aJ+="left:"+(aE[0]+r.left)+"px;"}}var aL=c(\'<div class="legend">\'+aM.replace(\'style="\',\'style="position:absolute;\'+aJ+";")+"</div>").appendTo(aw);if(P.legend.backgroundOpacity!=0){var aH=P.legend.backgroundColor;if(aH==null){aH=P.grid.backgroundColor;if(aH&&typeof aH=="string"){aH=c.color.parse(aH)}else{aH=c.color.extract(aL,"background-color")}aH.a=1;aH=aH.toString()}var aC=aL.children();c(\'<div 
style="position:absolute;width:\'+aC.width()+"px;height:"+aC.height()+"px;"+aJ+"background-color:"+aH+\';"> </div>\').prependTo(aL).css("opacity",P.legend.backgroundOpacity)}}}var ac=[],N=null;function L(aJ,aH,aE){var aP=P.grid.mouseActiveRadius,a1=aP*aP+1,aZ=null,aS=false,aX,aV;for(aX=R.length-1;aX>=0;--aX){if(!aE(R[aX])){continue}var aQ=R[aX],aI=aQ.xaxis,aG=aQ.yaxis,aW=aQ.datapoints.points,aU=aQ.datapoints.pointsize,aR=aI.c2p(aJ),aO=aG.c2p(aH),aD=aP/aI.scale,aC=aP/aG.scale;if(aI.options.inverseTransform){aD=Number.MAX_VALUE}if(aG.options.inverseTransform){aC=Number.MAX_VALUE}if(aQ.lines.show||aQ.points.show){for(aV=0;aV<aW.length;aV+=aU){var aL=aW[aV],aK=aW[aV+1];if(aL==null){continue}if(aL-aR>aD||aL-aR<-aD||aK-aO>aC||aK-aO<-aC){continue}var aN=Math.abs(aI.p2c(aL)-aJ),aM=Math.abs(aG.p2c(aK)-aH),aT=aN*aN+aM*aM;if(aT<a1){a1=aT;aZ=[aX,aV/aU]}}}if(aQ.bars.show&&!aZ){var aF=aQ.bars.align=="left"?0:-aQ.bars.barWidth/2,aY=aF+aQ.bars.barWidth;for(aV=0;aV<aW.length;aV+=aU){var aL=aW[aV],aK=aW[aV+1],a0=aW[aV+2];if(aL==null){continue}if(R[aX].bars.horizontal?(aR<=Math.max(a0,aL)&&aR>=Math.min(a0,aL)&&aO>=aK+aF&&aO<=aK+aY):(aR>=aL+aF&&aR<=aL+aY&&aO>=Math.min(a0,aK)&&aO<=Math.max(a0,aK))){aZ=[aX,aV/aU]}}}}if(aZ){aX=aZ[0];aV=aZ[1];aU=R[aX].datapoints.pointsize;return{datapoint:R[aX].datapoints.points.slice(aV*aU,(aV+1)*aU),dataIndex:aV,series:R[aX],seriesIndex:aX}}return null}function ab(aC){if(P.grid.hoverable){v("plothover",aC,function(aD){return aD.hoverable!=false})}}function l(aC){if(P.grid.hoverable){v("plothover",aC,function(aD){return false})}}function S(aC){v("plotclick",aC,function(aD){return aD.clickable!=false})}function v(aD,aC,aE){var aF=z.offset(),aI=aC.pageX-aF.left-r.left,aG=aC.pageY-aF.top-r.top,aK=D({left:aI,top:aG});aK.pageX=aC.pageX;aK.pageY=aC.pageY;var aL=L(aI,aG,aE);if(aL){aL.pageX=parseInt(aL.series.xaxis.p2c(aL.datapoint[0])+aF.left+r.left);aL.pageY=parseInt(aL.series.yaxis.p2c(aL.datapoint[1])+aF.top+r.top)}if(P.grid.autoHighlight){for(var aH=0;aH<ac.length;++aH){var aJ=ac[aH];if(aJ.auto==aD&&!(aL&&aJ.series==aL.series&&aJ.point[0]==aL.datapoint[0]&&aJ.point[1]==aL.datapoint[1])){U(aJ.series,aJ.point)}}if(aL){y(aL.series,aL.datapoint,aD)}}aw.trigger(aD,[aK,aL])}function f(){if(!N){N=setTimeout(t,30)}}function t(){N=null;B.save();B.clearRect(0,0,H,J);B.translate(r.left,r.top);var aD,aC;for(aD=0;aD<ac.length;++aD){aC=ac[aD];if(aC.series.bars.show){w(aC.series,aC.point)}else{az(aC.series,aC.point)}}B.restore();ao(al.drawOverlay,[B])}function y(aE,aC,aG){if(typeof aE=="number"){aE=R[aE]}if(typeof aC=="number"){var aF=aE.datapoints.pointsize;aC=aE.datapoints.points.slice(aF*aC,aF*(aC+1))}var aD=am(aE,aC);if(aD==-1){ac.push({series:aE,point:aC,auto:aG});f()}else{if(!aG){ac[aD].auto=false}}}function U(aE,aC){if(aE==null&&aC==null){ac=[];f()}if(typeof aE=="number"){aE=R[aE]}if(typeof aC=="number"){aC=aE.data[aC]}var aD=am(aE,aC);if(aD!=-1){ac.splice(aD,1);f()}}function am(aE,aF){for(var aC=0;aC<ac.length;++aC){var aD=ac[aC];if(aD.series==aE&&aD.point[0]==aF[0]&&aD.point[1]==aF[1]){return aC}}return -1}function az(aF,aE){var aD=aE[0],aJ=aE[1],aI=aF.xaxis,aH=aF.yaxis;if(aD<aI.min||aD>aI.max||aJ<aH.min||aJ>aH.max){return}var aG=aF.points.radius+aF.points.lineWidth/2;B.lineWidth=aG;B.strokeStyle=c.color.parse(aF.color).scale("a",0.5).toString();var aC=1.5*aG,aD=aI.p2c(aD),aJ=aH.p2c(aJ);B.beginPath();if(aF.points.symbol=="circle"){B.arc(aD,aJ,aC,0,2*Math.PI,false)}else{aF.points.symbol(B,aD,aJ,aC,false)}B.closePath();B.stroke()}function 
w(aF,aC){B.lineWidth=aF.bars.lineWidth;B.strokeStyle=c.color.parse(aF.color).scale("a",0.5).toString();var aE=c.color.parse(aF.color).scale("a",0.5).toString();var aD=aF.bars.align=="left"?0:-aF.bars.barWidth/2;F(aC[0],aC[1],aC[2]||0,aD,aD+aF.bars.barWidth,0,function(){return aE},aF.xaxis,aF.yaxis,B,aF.bars.horizontal,aF.bars.lineWidth)}function an(aK,aC,aI,aD){if(typeof aK=="string"){return aK}else{var aJ=I.createLinearGradient(0,aI,0,aC);for(var aF=0,aE=aK.colors.length;aF<aE;++aF){var aG=aK.colors[aF];if(typeof aG!="string"){var aH=c.color.parse(aD);if(aG.brightness!=null){aH=aH.scale("rgb",aG.brightness)}if(aG.opacity!=null){aH.a*=aG.opacity}aG=aH.toString()}aJ.addColorStop(aF/(aE-1),aG)}return aJ}}}c.plot=function(g,e,d){var f=new b(c(g),e,d,c.plot.plugins);return f};c.plot.version="0.7";c.plot.plugins=[];c.plot.formatDate=function(l,f,h){var p=function(d){d=""+d;return d.length==1?"0"+d:d};var e=[];var q=false,j=false;var o=l.getUTCHours();var k=o<12;if(h==null){h=["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]}if(f.search(/%p|%P/)!=-1){if(o>12){o=o-12}else{if(o==0){o=12}}}for(var g=0;g<f.length;++g){var n=f.charAt(g);if(q){switch(n){case"h":n=""+o;break;case"H":n=p(o);break;case"M":n=p(l.getUTCMinutes());break;case"S":n=p(l.getUTCSeconds());break;case"d":n=""+l.getUTCDate();break;case"m":n=""+(l.getUTCMonth()+1);break;case"y":n=""+l.getUTCFullYear();break;case"b":n=""+h[l.getUTCMonth()];break;case"p":n=(k)?("am"):("pm");break;case"P":n=(k)?("AM"):("PM");break;case"0":n="";j=true;break}if(n&&j){n=p(n);j=false}e.push(n);if(!j){q=false}}else{if(n=="%"){q=true}else{e.push(n)}}}return e.join("")};function a(e,d){return d*Math.floor(e/d)}})(jQuery);(function(b){var a={series:{stack:null}};function c(f){function d(k,j){var h=null;for(var g=0;g<j.length;++g){if(k==j[g]){break}if(j[g].stack==k.stack){h=j[g]}}return h}function e(C,v,g){if(v.stack==null){return}var p=d(v,C.getData());if(!p){return}var z=g.pointsize,F=g.points,h=p.datapoints.pointsize,y=p.datapoints.points,t=[],x,w,k,J,I,r,u=v.lines.show,G=v.bars.horizontal,o=z>2&&(G?g.format[2].x:g.format[2].y),n=u&&v.lines.steps,E=true,q=G?1:0,H=G?0:1,D=0,B=0,A;while(true){if(D>=F.length){break}A=t.length;if(F[D]==null){for(m=0;m<z;++m){t.push(F[D+m])}D+=z}else{if(B>=y.length){if(!u){for(m=0;m<z;++m){t.push(F[D+m])}}D+=z}else{if(y[B]==null){for(m=0;m<z;++m){t.push(null)}E=true;B+=h}else{x=F[D+q];w=F[D+H];J=y[B+q];I=y[B+H];r=0;if(x==J){for(m=0;m<z;++m){t.push(F[D+m])}t[A+H]+=I;r=I;D+=z;B+=h}else{if(x>J){if(u&&D>0&&F[D-z]!=null){k=w+(F[D-z+H]-w)*(J-x)/(F[D-z+q]-x);t.push(J);t.push(k+I);for(m=2;m<z;++m){t.push(F[D+m])}r=I}B+=h}else{if(E&&u){D+=z;continue}for(m=0;m<z;++m){t.push(F[D+m])}if(u&&B>0&&y[B-h]!=null){r=I+(y[B-h+H]-I)*(x-J)/(y[B-h+q]-J)}t[A+H]+=r;D+=z}}E=false;if(A!=t.length&&o){t[A+2]+=r}}}}if(n&&A!=t.length&&A>0&&t[A]!=null&&t[A]!=t[A-z]&&t[A+1]!=t[A-z+1]){for(m=0;m<z;++m){t[A+z+m]=t[A+m]}t[A+1]=t[A-z+1]}}g.points=t}f.hooks.processDatapoints.push(e)}b.plot.plugins.push({init:c,options:a,name:"stack",version:"1.2"})})(jQuery);
(function(b){function c(D){var h=null;var L=null;var n=null;var B=null;var p=null;var M=0;var F=true;var o=10;var w=0.95;var A=0;var d=false;var z=false;var j=[];D.hooks.processOptions.push(g);D.hooks.bindEvents.push(e);function g(O,N){if(N.series.pie.show){N.grid.show=false;if(N.series.pie.label.show=="auto"){if(N.legend.show){N.series.pie.label.show=false}else{N.series.pie.label.show=true}}if(N.series.pie.radius=="auto"){if(N.series.pie.label.show){N.series.pie.radius=3/4}else{N.series.pie.radius=1}}if(N.series.pie.tilt>1){N.series.pie.tilt=1}if(N.series.pie.tilt<0){N.series.pie.tilt=0}O.hooks.processDatapoints.push(E);O.hooks.drawOverlay.push(H);O.hooks.draw.push(r)}}function e(P,N){var O=P.getOptions();if(O.series.pie.show&&O.grid.hoverable){N.unbind("mousemove").mousemove(t)}if(O.series.pie.show&&O.grid.clickable){N.unbind("click").click(l)}}function G(O){var P="";function N(S,T){if(!T){T=0}for(var R=0;R<S.length;++R){for(var Q=0;Q<T;Q++){P+="\\t"}if(typeof S[R]=="object"){P+=""+R+":\\n";N(S[R],T+1)}else{P+=""+R+": "+S[R]+"\\n"}}}N(O);alert(P)}function q(P){for(var N=0;N<P.length;++N){var O=parseFloat(P[N].data[0][1]);if(O){M+=O}}}function E(Q,N,O,P){if(!d){d=true;h=Q.getCanvas();L=b(h).parent();a=Q.getOptions();Q.setData(K(Q.getData()))}}function I(){A=L.children().filter(".legend").children().width();n=Math.min(h.width,(h.height/a.series.pie.tilt))/2;p=(h.height/2)+a.series.pie.offset.top;B=(h.width/2);if(a.series.pie.offset.left=="auto"){if(a.legend.position.match("w")){B+=A/2}else{B-=A/2}}else{B+=a.series.pie.offset.left}if(B<n){B=n}else{if(B>h.width-n){B=h.width-n}}}function v(O){for(var N=0;N<O.length;++N){if(typeof(O[N].data)=="number"){O[N].data=[[1,O[N].data]]}else{if(typeof(O[N].data)=="undefined"||typeof(O[N].data[0])=="undefined"){if(typeof(O[N].data)!="undefined"&&typeof(O[N].data.label)!="undefined"){O[N].label=O[N].data.label}O[N].data=[[1,0]]}}}return O}function K(Q){Q=v(Q);q(Q);var P=0;var S=0;var N=a.series.pie.combine.color;var R=[];for(var O=0;O<Q.length;++O){Q[O].data[0][1]=parseFloat(Q[O].data[0][1]);if(!Q[O].data[0][1]){Q[O].data[0][1]=0}if(Q[O].data[0][1]/M<=a.series.pie.combine.threshold){P+=Q[O].data[0][1];S++;if(!N){N=Q[O].color}}else{R.push({data:[[1,Q[O].data[0][1]]],color:Q[O].color,label:Q[O].label,angle:(Q[O].data[0][1]*(Math.PI*2))/M,percent:(Q[O].data[0][1]/M*100)})}}if(S>0){R.push({data:[[1,P]],color:N,label:a.series.pie.combine.label,angle:(P*(Math.PI*2))/M,percent:(P/M*100)})}return R}function r(S,Q){if(!L){return}ctx=Q;I();var T=S.getData();var P=0;while(F&&P<o){F=false;if(P>0){n*=w}P+=1;N();if(a.series.pie.tilt<=0.8){O()}R()}if(P>=o){N();L.prepend(\'<div class="error">Could not draw pie with labels contained inside canvas</div>\')}if(S.setSeries&&S.insertLegend){S.setSeries(T);S.insertLegend()}function N(){ctx.clearRect(0,0,h.width,h.height);L.children().filter(".pieLabel, .pieLabelBackground").remove()}function O(){var Z=5;var Y=15;var W=10;var X=0.02;if(a.series.pie.radius>1){var U=a.series.pie.radius}else{var U=n*a.series.pie.radius}if(U>=(h.width/2)-Z||U*a.series.pie.tilt>=(h.height/2)-Y||U<=W){return}ctx.save();ctx.translate(Z,Y);ctx.globalAlpha=X;ctx.fillStyle="#000";ctx.translate(B,p);ctx.scale(1,a.series.pie.tilt);for(var V=1;V<=W;V++){ctx.beginPath();ctx.arc(0,0,U,0,Math.PI*2,false);ctx.fill();U-=V}ctx.restore()}function R(){startAngle=Math.PI*a.series.pie.startAngle;if(a.series.pie.radius>1){var U=a.series.pie.radius}else{var U=n*a.series.pie.radius}ctx.save();ctx.translate(B,p);ctx.scale(1,a.series.pie.tilt);ctx.save();var 
Y=startAngle;for(var W=0;W<T.length;++W){T[W].startAngle=Y;X(T[W].angle,T[W].color,true)}ctx.restore();ctx.save();ctx.lineWidth=a.series.pie.stroke.width;Y=startAngle;for(var W=0;W<T.length;++W){X(T[W].angle,a.series.pie.stroke.color,false)}ctx.restore();J(ctx);if(a.series.pie.label.show){V()}ctx.restore();function X(ab,Z,aa){if(ab<=0){return}if(aa){ctx.fillStyle=Z}else{ctx.strokeStyle=Z;ctx.lineJoin="round"}ctx.beginPath();if(Math.abs(ab-Math.PI*2)>1e-9){ctx.moveTo(0,0)}else{if(b.browser.msie){ab-=0.0001}}ctx.arc(0,0,U,Y,Y+ab,false);ctx.closePath();Y+=ab;if(aa){ctx.fill()}else{ctx.stroke()}}function V(){var ac=startAngle;if(a.series.pie.label.radius>1){var Z=a.series.pie.label.radius}else{var Z=n*a.series.pie.label.radius}for(var ab=0;ab<T.length;++ab){if(T[ab].percent>=a.series.pie.label.threshold*100){aa(T[ab],ac,ab)}ac+=T[ab].angle}function aa(ap,ai,ag){if(ap.data[0][1]==0){return}var ar=a.legend.labelFormatter,aq,ae=a.series.pie.label.formatter;if(ar){aq=ar(ap.label,ap)}else{aq=ap.label}if(ae){aq=ae(aq,ap)}var aj=((ai+ap.angle)+ai)/2;var ao=B+Math.round(Math.cos(aj)*Z);var am=p+Math.round(Math.sin(aj)*Z)*a.series.pie.tilt;var af=\'<span class="pieLabel" id="pieLabel\'+ag+\'" style="position:absolute;top:\'+am+"px;left:"+ao+\'px;">\'+aq+"</span>";L.append(af);var an=L.children("#pieLabel"+ag);var ad=(am-an.height()/2);var ah=(ao-an.width()/2);an.css("top",ad);an.css("left",ah);if(0-ad>0||0-ah>0||h.height-(ad+an.height())<0||h.width-(ah+an.width())<0){F=true}if(a.series.pie.label.background.opacity!=0){var ak=a.series.pie.label.background.color;if(ak==null){ak=ap.color}var al="top:"+ad+"px;left:"+ah+"px;";b(\'<div class="pieLabelBackground" style="position:absolute;width:\'+an.width()+"px;height:"+an.height()+"px;"+al+"background-color:"+ak+\';"> </div>\').insertBefore(an).css("opacity",a.series.pie.label.background.opacity)}}}}}function J(N){if(a.series.pie.innerRadius>0){N.save();innerRadius=a.series.pie.innerRadius>1?a.series.pie.innerRadius:n*a.series.pie.innerRadius;N.globalCompositeOperation="destination-out";N.beginPath();N.fillStyle=a.series.pie.stroke.color;N.arc(0,0,innerRadius,0,Math.PI*2,false);N.fill();N.closePath();N.restore();N.save();N.beginPath();N.strokeStyle=a.series.pie.stroke.color;N.arc(0,0,innerRadius,0,Math.PI*2,false);N.stroke();N.closePath();N.restore()}}function s(Q,R){for(var S=false,P=-1,N=Q.length,O=N-1;++P<N;O=P){((Q[P][1]<=R[1]&&R[1]<Q[O][1])||(Q[O][1]<=R[1]&&R[1]<Q[P][1]))&&(R[0]<(Q[O][0]-Q[P][0])*(R[1]-Q[P][1])/(Q[O][1]-Q[P][1])+Q[P][0])&&(S=!S)}return S}function u(R,P){var T=D.getData(),O=D.getOptions(),N=O.series.pie.radius>1?O.series.pie.radius:n*O.series.pie.radius;for(var Q=0;Q<T.length;++Q){var 
S=T[Q];if(S.pie.show){ctx.save();ctx.beginPath();ctx.moveTo(0,0);ctx.arc(0,0,N,S.startAngle,S.startAngle+S.angle,false);ctx.closePath();x=R-B;y=P-p;if(ctx.isPointInPath){if(ctx.isPointInPath(R-B,P-p)){ctx.restore();return{datapoint:[S.percent,S.data],dataIndex:0,series:S,seriesIndex:Q}}}else{p1X=(N*Math.cos(S.startAngle));p1Y=(N*Math.sin(S.startAngle));p2X=(N*Math.cos(S.startAngle+(S.angle/4)));p2Y=(N*Math.sin(S.startAngle+(S.angle/4)));p3X=(N*Math.cos(S.startAngle+(S.angle/2)));p3Y=(N*Math.sin(S.startAngle+(S.angle/2)));p4X=(N*Math.cos(S.startAngle+(S.angle/1.5)));p4Y=(N*Math.sin(S.startAngle+(S.angle/1.5)));p5X=(N*Math.cos(S.startAngle+S.angle));p5Y=(N*Math.sin(S.startAngle+S.angle));arrPoly=[[0,0],[p1X,p1Y],[p2X,p2Y],[p3X,p3Y],[p4X,p4Y],[p5X,p5Y]];arrPoint=[x,y];if(s(arrPoly,arrPoint)){ctx.restore();return{datapoint:[S.percent,S.data],dataIndex:0,series:S,seriesIndex:Q}}}ctx.restore()}}return null}function t(N){m("plothover",N)}function l(N){m("plotclick",N)}function m(N,T){var O=D.offset(),R=parseInt(T.pageX-O.left),P=parseInt(T.pageY-O.top),V=u(R,P);if(a.grid.autoHighlight){for(var Q=0;Q<j.length;++Q){var S=j[Q];if(S.auto==N&&!(V&&S.series==V.series)){f(S.series)}}}if(V){k(V.series,N)}var U={pageX:T.pageX,pageY:T.pageY};L.trigger(N,[U,V])}function k(O,P){if(typeof O=="number"){O=series[O]}var N=C(O);if(N==-1){j.push({series:O,auto:P});D.triggerRedrawOverlay()}else{if(!P){j[N].auto=false}}}function f(O){if(O==null){j=[];D.triggerRedrawOverlay()}if(typeof O=="number"){O=series[O]}var N=C(O);if(N!=-1){j.splice(N,1);D.triggerRedrawOverlay()}}function C(P){for(var N=0;N<j.length;++N){var O=j[N];if(O.series==P){return N}}return -1}function H(Q,R){var P=Q.getOptions();var N=P.series.pie.radius>1?P.series.pie.radius:n*P.series.pie.radius;R.save();R.translate(B,p);R.scale(1,P.series.pie.tilt);for(i=0;i<j.length;++i){O(j[i].series)}J(R);R.restore();function O(S){if(S.angle<0){return}R.fillStyle="rgba(255, 255, 255, "+P.series.pie.highlight.opacity+")";R.beginPath();if(Math.abs(S.angle-Math.PI*2)>1e-9){R.moveTo(0,0)}R.arc(0,0,N,S.startAngle,S.startAngle+S.angle,false);R.closePath();R.fill()}}}var a={series:{pie:{show:false,radius:"auto",innerRadius:0,startAngle:3/2,tilt:1,offset:{top:0,left:"auto"},stroke:{color:"#FFF",width:1},label:{show:"auto",formatter:function(d,e){return\'<div style="font-size:x-small;text-align:center;padding:2px;color:\'+e.color+\';">\'+d+"<br/>"+Math.round(e.percent)+"%</div>"},radius:1,background:{color:null,opacity:0},threshold:0},combine:{threshold:-1,color:null,label:"Other"},highlight:{opacity:0.5}}}};b.plot.plugins.push({init:c,options:a,name:"pie",version:"1.0"})})(jQuery);'''
ui = '''/*!\n * jQuery UI 1.8.9\n *\n * Copyright 2011, AUTHORS.txt (http://jqueryui.com/about)\n * Dual licensed under the MIT or GPL Version 2 licenses.\n * http://jquery.org/license\n *\n * http://docs.jquery.com/UI\n */\n(function(b,c){function f(g){return!b(g).parents().andSelf().filter(function(){return b.curCSS(this,"visibility")==="hidden"||b.expr.filters.hidden(this)}).length}b.ui=b.ui||{};if(!b.ui.version){b.extend(b.ui,{version:"1.8.9",keyCode:{ALT:18,BACKSPACE:8,CAPS_LOCK:20,COMMA:188,COMMAND:91,COMMAND_LEFT:91,COMMAND_RIGHT:93,CONTROL:17,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,INSERT:45,LEFT:37,MENU:93,NUMPAD_ADD:107,NUMPAD_DECIMAL:110,NUMPAD_DIVIDE:111,NUMPAD_ENTER:108,NUMPAD_MULTIPLY:106,\nNUMPAD_SUBTRACT:109,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SHIFT:16,SPACE:32,TAB:9,UP:38,WINDOWS:91}});b.fn.extend({_focus:b.fn.focus,focus:function(g,e){return typeof g==="number"?this.each(function(){var a=this;setTimeout(function(){b(a).focus();e&&e.call(a)},g)}):this._focus.apply(this,arguments)},scrollParent:function(){var g;g=b.browser.msie&&/(static|relative)/.test(this.css("position"))||/absolute/.test(this.css("position"))?this.parents().filter(function(){return/(relative|absolute|fixed)/.test(b.curCSS(this,\n"position",1))&&/(auto|scroll)/.test(b.curCSS(this,"overflow",1)+b.curCSS(this,"overflow-y",1)+b.curCSS(this,"overflow-x",1))}).eq(0):this.parents().filter(function(){return/(auto|scroll)/.test(b.curCSS(this,"overflow",1)+b.curCSS(this,"overflow-y",1)+b.curCSS(this,"overflow-x",1))}).eq(0);return/fixed/.test(this.css("position"))||!g.length?b(document):g},zIndex:function(g){if(g!==c)return this.css("zIndex",g);if(this.length){g=b(this[0]);for(var e;g.length&&g[0]!==document;){e=g.css("position");\nif(e==="absolute"||e==="relative"||e==="fixed"){e=parseInt(g.css("zIndex"),10);if(!isNaN(e)&&e!==0)return e}g=g.parent()}}return 0},disableSelection:function(){return this.bind((b.support.selectstart?"selectstart":"mousedown")+".ui-disableSelection",function(g){g.preventDefault()})},enableSelection:function(){return this.unbind(".ui-disableSelection")}});b.each(["Width","Height"],function(g,e){function a(j,n,q,l){b.each(d,function(){n-=parseFloat(b.curCSS(j,"padding"+this,true))||0;if(q)n-=parseFloat(b.curCSS(j,\n"border"+this+"Width",true))||0;if(l)n-=parseFloat(b.curCSS(j,"margin"+this,true))||0});return n}var d=e==="Width"?["Left","Right"]:["Top","Bottom"],h=e.toLowerCase(),i={innerWidth:b.fn.innerWidth,innerHeight:b.fn.innerHeight,outerWidth:b.fn.outerWidth,outerHeight:b.fn.outerHeight};b.fn["inner"+e]=function(j){if(j===c)return i["inner"+e].call(this);return this.each(function(){b(this).css(h,a(this,j)+"px")})};b.fn["outer"+e]=function(j,n){if(typeof j!=="number")return i["outer"+e].call(this,j);return this.each(function(){b(this).css(h,\na(this,j,true,n)+"px")})}});b.extend(b.expr[":"],{data:function(g,e,a){return!!b.data(g,a[3])},focusable:function(g){var e=g.nodeName.toLowerCase(),a=b.attr(g,"tabindex");if("area"===e){e=g.parentNode;a=e.name;if(!g.href||!a||e.nodeName.toLowerCase()!=="map")return false;g=b("img[usemap=#"+a+"]")[0];return!!g&&f(g)}return(/input|select|textarea|button|object/.test(e)?!g.disabled:"a"==e?g.href||!isNaN(a):!isNaN(a))&&f(g)},tabbable:function(g){var e=b.attr(g,"tabindex");return(isNaN(e)||e>=0)&&b(g).is(":focusable")}});\nb(function(){var 
g=document.body,e=g.appendChild(e=document.createElement("div"));b.extend(e.style,{minHeight:"100px",height:"auto",padding:0,borderWidth:0});b.support.minHeight=e.offsetHeight===100;b.support.selectstart="onselectstart"in e;g.removeChild(e).style.display="none"});b.extend(b.ui,{plugin:{add:function(g,e,a){g=b.ui[g].prototype;for(var d in a){g.plugins[d]=g.plugins[d]||[];g.plugins[d].push([e,a[d]])}},call:function(g,e,a){if((e=g.plugins[e])&&g.element[0].parentNode)for(var d=0;d<e.length;d++)g.options[e[d][0]]&&\ne[d][1].apply(g.element,a)}},contains:function(g,e){return document.compareDocumentPosition?g.compareDocumentPosition(e)&16:g!==e&&g.contains(e)},hasScroll:function(g,e){if(b(g).css("overflow")==="hidden")return false;e=e&&e==="left"?"scrollLeft":"scrollTop";var a=false;if(g[e]>0)return true;g[e]=1;a=g[e]>0;g[e]=0;return a},isOverAxis:function(g,e,a){return g>e&&g<e+a},isOver:function(g,e,a,d,h,i){return b.ui.isOverAxis(g,a,h)&&b.ui.isOverAxis(e,d,i)}})}})(jQuery);\n(function(b,c){if(b.cleanData){var f=b.cleanData;b.cleanData=function(e){for(var a=0,d;(d=e[a])!=null;a++)b(d).triggerHandler("remove");f(e)}}else{var g=b.fn.remove;b.fn.remove=function(e,a){return this.each(function(){if(!a)if(!e||b.filter(e,[this]).length)b("*",this).add([this]).each(function(){b(this).triggerHandler("remove")});return g.call(b(this),e,a)})}}b.widget=function(e,a,d){var h=e.split(".")[0],i;e=e.split(".")[1];i=h+"-"+e;if(!d){d=a;a=b.Widget}b.expr[":"][i]=function(j){return!!b.data(j,\ne)};b[h]=b[h]||{};b[h][e]=function(j,n){arguments.length&&this._createWidget(j,n)};a=new a;a.options=b.extend(true,{},a.options);b[h][e].prototype=b.extend(true,a,{namespace:h,widgetName:e,widgetEventPrefix:b[h][e].prototype.widgetEventPrefix||e,widgetBaseClass:i},d);b.widget.bridge(e,b[h][e])};b.widget.bridge=function(e,a){b.fn[e]=function(d){var h=typeof d==="string",i=Array.prototype.slice.call(arguments,1),j=this;d=!h&&i.length?b.extend.apply(null,[true,d].concat(i)):d;if(h&&d.charAt(0)==="_")return j;\nh?this.each(function(){var n=b.data(this,e),q=n&&b.isFunction(n[d])?n[d].apply(n,i):n;if(q!==n&&q!==c){j=q;return false}}):this.each(function(){var n=b.data(this,e);n?n.option(d||{})._init():b.data(this,e,new a(d,this))});return j}};b.Widget=function(e,a){arguments.length&&this._createWidget(e,a)};b.Widget.prototype={widgetName:"widget",widgetEventPrefix:"",options:{disabled:false},_createWidget:function(e,a){b.data(a,this.widgetName,this);this.element=b(a);this.options=b.extend(true,{},this.options,\nthis._getCreateOptions(),e);var d=this;this.element.bind("remove."+this.widgetName,function(){d.destroy()});this._create();this._trigger("create");this._init()},_getCreateOptions:function(){return b.metadata&&b.metadata.get(this.element[0])[this.widgetName]},_create:function(){},_init:function(){},destroy:function(){this.element.unbind("."+this.widgetName).removeData(this.widgetName);this.widget().unbind("."+this.widgetName).removeAttr("aria-disabled").removeClass(this.widgetBaseClass+"-disabled ui-state-disabled")},\nwidget:function(){return this.element},option:function(e,a){var d=e;if(arguments.length===0)return b.extend({},this.options);if(typeof e==="string"){if(a===c)return this.options[e];d={};d[e]=a}this._setOptions(d);return this},_setOptions:function(e){var a=this;b.each(e,function(d,h){a._setOption(d,h)});return this},_setOption:function(e,a){this.options[e]=a;if(e==="disabled")this.widget()[a?"addClass":"removeClass"](this.widgetBaseClass+"-disabled ui-state-disabled").attr("aria-disabled",a);return 
this},\nenable:function(){return this._setOption("disabled",false)},disable:function(){return this._setOption("disabled",true)},_trigger:function(e,a,d){var h=this.options[e];a=b.Event(a);a.type=(e===this.widgetEventPrefix?e:this.widgetEventPrefix+e).toLowerCase();d=d||{};if(a.originalEvent){e=b.event.props.length;for(var i;e;){i=b.event.props[--e];a[i]=a.originalEvent[i]}}this.element.trigger(a,d);return!(b.isFunction(h)&&h.call(this.element[0],a,d)===false||a.isDefaultPrevented())}}})(jQuery);\n(function(b){b.widget("ui.mouse",{options:{cancel:":input,option",distance:1,delay:0},_mouseInit:function(){var c=this;this.element.bind("mousedown."+this.widgetName,function(f){return c._mouseDown(f)}).bind("click."+this.widgetName,function(f){if(true===b.data(f.target,c.widgetName+".preventClickEvent")){b.removeData(f.target,c.widgetName+".preventClickEvent");f.stopImmediatePropagation();return false}});this.started=false},_mouseDestroy:function(){this.element.unbind("."+this.widgetName)},_mouseDown:function(c){c.originalEvent=\nc.originalEvent||{};if(!c.originalEvent.mouseHandled){this._mouseStarted&&this._mouseUp(c);this._mouseDownEvent=c;var f=this,g=c.which==1,e=typeof this.options.cancel=="string"?b(c.target).parents().add(c.target).filter(this.options.cancel).length:false;if(!g||e||!this._mouseCapture(c))return true;this.mouseDelayMet=!this.options.delay;if(!this.mouseDelayMet)this._mouseDelayTimer=setTimeout(function(){f.mouseDelayMet=true},this.options.delay);if(this._mouseDistanceMet(c)&&this._mouseDelayMet(c)){this._mouseStarted=\nthis._mouseStart(c)!==false;if(!this._mouseStarted){c.preventDefault();return true}}this._mouseMoveDelegate=function(a){return f._mouseMove(a)};this._mouseUpDelegate=function(a){return f._mouseUp(a)};b(document).bind("mousemove."+this.widgetName,this._mouseMoveDelegate).bind("mouseup."+this.widgetName,this._mouseUpDelegate);c.preventDefault();return c.originalEvent.mouseHandled=true}},_mouseMove:function(c){if(b.browser.msie&&!(document.documentMode>=9)&&!c.button)return this._mouseUp(c);if(this._mouseStarted){this._mouseDrag(c);\nreturn c.preventDefault()}if(this._mouseDistanceMet(c)&&this._mouseDelayMet(c))(this._mouseStarted=this._mouseStart(this._mouseDownEvent,c)!==false)?this._mouseDrag(c):this._mouseUp(c);return!this._mouseStarted},_mouseUp:function(c){b(document).unbind("mousemove."+this.widgetName,this._mouseMoveDelegate).unbind("mouseup."+this.widgetName,this._mouseUpDelegate);if(this._mouseStarted){this._mouseStarted=false;c.target==this._mouseDownEvent.target&&b.data(c.target,this.widgetName+".preventClickEvent",\ntrue);this._mouseStop(c)}return false},_mouseDistanceMet:function(c){return Math.max(Math.abs(this._mouseDownEvent.pageX-c.pageX),Math.abs(this._mouseDownEvent.pageY-c.pageY))>=this.options.distance},_mouseDelayMet:function(){return this.mouseDelayMet},_mouseStart:function(){},_mouseDrag:function(){},_mouseStop:function(){},_mouseCapture:function(){return 
true}})})(jQuery);\n(function(b){b.widget("ui.draggable",b.ui.mouse,{widgetEventPrefix:"drag",options:{addClasses:true,appendTo:"parent",axis:false,connectToSortable:false,containment:false,cursor:"auto",cursorAt:false,grid:false,handle:false,helper:"original",iframeFix:false,opacity:false,refreshPositions:false,revert:false,revertDuration:500,scope:"default",scroll:true,scrollSensitivity:20,scrollSpeed:20,snap:false,snapMode:"both",snapTolerance:20,stack:false,zIndex:false},_create:function(){if(this.options.helper==\n"original"&&!/^(?:r|a|f)/.test(this.element.css("position")))this.element[0].style.position="relative";this.options.addClasses&&this.element.addClass("ui-draggable");this.options.disabled&&this.element.addClass("ui-draggable-disabled");this._mouseInit()},destroy:function(){if(this.element.data("draggable")){this.element.removeData("draggable").unbind(".draggable").removeClass("ui-draggable ui-draggable-dragging ui-draggable-disabled");this._mouseDestroy();return this}},_mouseCapture:function(c){var f=\nthis.options;if(this.helper||f.disabled||b(c.target).is(".ui-resizable-handle"))return false;this.handle=this._getHandle(c);if(!this.handle)return false;return true},_mouseStart:function(c){var f=this.options;this.helper=this._createHelper(c);this._cacheHelperProportions();if(b.ui.ddmanager)b.ui.ddmanager.current=this;this._cacheMargins();this.cssPosition=this.helper.css("position");this.scrollParent=this.helper.scrollParent();this.offset=this.positionAbs=this.element.offset();this.offset={top:this.offset.top-\nthis.margins.top,left:this.offset.left-this.margins.left};b.extend(this.offset,{click:{left:c.pageX-this.offset.left,top:c.pageY-this.offset.top},parent:this._getParentOffset(),relative:this._getRelativeOffset()});this.originalPosition=this.position=this._generatePosition(c);this.originalPageX=c.pageX;this.originalPageY=c.pageY;f.cursorAt&&this._adjustOffsetFromHelper(f.cursorAt);f.containment&&this._setContainment();if(this._trigger("start",c)===false){this._clear();return false}this._cacheHelperProportions();\nb.ui.ddmanager&&!f.dropBehaviour&&b.ui.ddmanager.prepareOffsets(this,c);this.helper.addClass("ui-draggable-dragging");this._mouseDrag(c,true);return true},_mouseDrag:function(c,f){this.position=this._generatePosition(c);this.positionAbs=this._convertPositionTo("absolute");if(!f){f=this._uiHash();if(this._trigger("drag",c,f)===false){this._mouseUp({});return false}this.position=f.position}if(!this.options.axis||this.options.axis!="y")this.helper[0].style.left=this.position.left+"px";if(!this.options.axis||\nthis.options.axis!="x")this.helper[0].style.top=this.position.top+"px";b.ui.ddmanager&&b.ui.ddmanager.drag(this,c);return false},_mouseStop:function(c){var f=false;if(b.ui.ddmanager&&!this.options.dropBehaviour)f=b.ui.ddmanager.drop(this,c);if(this.dropped){f=this.dropped;this.dropped=false}if((!this.element[0]||!this.element[0].parentNode)&&this.options.helper=="original")return false;if(this.options.revert=="invalid"&&!f||this.options.revert=="valid"&&f||this.options.revert===true||b.isFunction(this.options.revert)&&\nthis.options.revert.call(this.element,f)){var g=this;b(this.helper).animate(this.originalPosition,parseInt(this.options.revertDuration,10),function(){g._trigger("stop",c)!==false&&g._clear()})}else this._trigger("stop",c)!==false&&this._clear();return false},cancel:function(){this.helper.is(".ui-draggable-dragging")?this._mouseUp({}):this._clear();return this},_getHandle:function(c){var 
f=!this.options.handle||!b(this.options.handle,this.element).length?true:false;b(this.options.handle,this.element).find("*").andSelf().each(function(){if(this==\nc.target)f=true});return f},_createHelper:function(c){var f=this.options;c=b.isFunction(f.helper)?b(f.helper.apply(this.element[0],[c])):f.helper=="clone"?this.element.clone():this.element;c.parents("body").length||c.appendTo(f.appendTo=="parent"?this.element[0].parentNode:f.appendTo);c[0]!=this.element[0]&&!/(fixed|absolute)/.test(c.css("position"))&&c.css("position","absolute");return c},_adjustOffsetFromHelper:function(c){if(typeof c=="string")c=c.split(" ");if(b.isArray(c))c={left:+c[0],top:+c[1]||\n0};if("left"in c)this.offset.click.left=c.left+this.margins.left;if("right"in c)this.offset.click.left=this.helperProportions.width-c.right+this.margins.left;if("top"in c)this.offset.click.top=c.top+this.margins.top;if("bottom"in c)this.offset.click.top=this.helperProportions.height-c.bottom+this.margins.top},_getParentOffset:function(){this.offsetParent=this.helper.offsetParent();var c=this.offsetParent.offset();if(this.cssPosition=="absolute"&&this.scrollParent[0]!=document&&b.ui.contains(this.scrollParent[0],\nthis.offsetParent[0])){c.left+=this.scrollParent.scrollLeft();c.top+=this.scrollParent.scrollTop()}if(this.offsetParent[0]==document.body||this.offsetParent[0].tagName&&this.offsetParent[0].tagName.toLowerCase()=="html"&&b.browser.msie)c={top:0,left:0};return{top:c.top+(parseInt(this.offsetParent.css("borderTopWidth"),10)||0),left:c.left+(parseInt(this.offsetParent.css("borderLeftWidth"),10)||0)}},_getRelativeOffset:function(){if(this.cssPosition=="relative"){var c=this.element.position();return{top:c.top-\n(parseInt(this.helper.css("top"),10)||0)+this.scrollParent.scrollTop(),left:c.left-(parseInt(this.helper.css("left"),10)||0)+this.scrollParent.scrollLeft()}}else return{top:0,left:0}},_cacheMargins:function(){this.margins={left:parseInt(this.element.css("marginLeft"),10)||0,top:parseInt(this.element.css("marginTop"),10)||0}},_cacheHelperProportions:function(){this.helperProportions={width:this.helper.outerWidth(),height:this.helper.outerHeight()}},_setContainment:function(){var c=this.options;if(c.containment==\n"parent")c.containment=this.helper[0].parentNode;if(c.containment=="document"||c.containment=="window")this.containment=[(c.containment=="document"?0:b(window).scrollLeft())-this.offset.relative.left-this.offset.parent.left,(c.containment=="document"?0:b(window).scrollTop())-this.offset.relative.top-this.offset.parent.top,(c.containment=="document"?0:b(window).scrollLeft())+b(c.containment=="document"?document:window).width()-this.helperProportions.width-this.margins.left,(c.containment=="document"?\n0:b(window).scrollTop())+(b(c.containment=="document"?document:window).height()||document.body.parentNode.scrollHeight)-this.helperProportions.height-this.margins.top];if(!/^(document|window|parent)$/.test(c.containment)&&c.containment.constructor!=Array){var f=b(c.containment)[0];if(f){c=b(c.containment).offset();var 
g=b(f).css("overflow")!="hidden";this.containment=[c.left+(parseInt(b(f).css("borderLeftWidth"),10)||0)+(parseInt(b(f).css("paddingLeft"),10)||0)-this.margins.left,c.top+(parseInt(b(f).css("borderTopWidth"),\n10)||0)+(parseInt(b(f).css("paddingTop"),10)||0)-this.margins.top,c.left+(g?Math.max(f.scrollWidth,f.offsetWidth):f.offsetWidth)-(parseInt(b(f).css("borderLeftWidth"),10)||0)-(parseInt(b(f).css("paddingRight"),10)||0)-this.helperProportions.width-this.margins.left,c.top+(g?Math.max(f.scrollHeight,f.offsetHeight):f.offsetHeight)-(parseInt(b(f).css("borderTopWidth"),10)||0)-(parseInt(b(f).css("paddingBottom"),10)||0)-this.helperProportions.height-this.margins.top]}}else if(c.containment.constructor==\nArray)this.containment=c.containment},_convertPositionTo:function(c,f){if(!f)f=this.position;c=c=="absolute"?1:-1;var g=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&b.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,e=/(html|body)/i.test(g[0].tagName);return{top:f.top+this.offset.relative.top*c+this.offset.parent.top*c-(b.browser.safari&&b.browser.version<526&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollTop():\ne?0:g.scrollTop())*c),left:f.left+this.offset.relative.left*c+this.offset.parent.left*c-(b.browser.safari&&b.browser.version<526&&this.cssPosition=="fixed"?0:(this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():e?0:g.scrollLeft())*c)}},_generatePosition:function(c){var f=this.options,g=this.cssPosition=="absolute"&&!(this.scrollParent[0]!=document&&b.ui.contains(this.scrollParent[0],this.offsetParent[0]))?this.offsetParent:this.scrollParent,e=/(html|body)/i.test(g[0].tagName),a=c.pageX,d=c.pageY;\nif(this.originalPosition){if(this.containment){if(c.pageX-this.offset.click.left<this.containment[0])a=this.containment[0]+this.offset.click.left;if(c.pageY-this.offset.click.top<this.containment[1])d=this.containment[1]+this.offset.click.top;if(c.pageX-this.offset.click.left>this.containment[2])a=this.containment[2]+this.offset.click.left;if(c.pageY-this.offset.click.top>this.containment[3])d=this.containment[3]+this.offset.click.top}if(f.grid){d=this.originalPageY+Math.round((d-this.originalPageY)/\nf.grid[1])*f.grid[1];d=this.containment?!(d-this.offset.click.top<this.containment[1]||d-this.offset.click.top>this.containment[3])?d:!(d-this.offset.click.top<this.containment[1])?d-f.grid[1]:d+f.grid[1]:d;a=this.originalPageX+Math.round((a-this.originalPageX)/f.grid[0])*f.grid[0];a=this.containment?!(a-this.offset.click.left<this.containment[0]||a-this.offset.click.left>this.containment[2])?a:!(a-this.offset.click.left<this.containment[0])?a-f.grid[0]:a+f.grid[0]:a}}return{top:d-this.offset.click.top-\nthis.offset.relative.top-this.offset.parent.top+(b.browser.safari&&b.browser.version<526&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollTop():e?0:g.scrollTop()),left:a-this.offset.click.left-this.offset.relative.left-this.offset.parent.left+(b.browser.safari&&b.browser.version<526&&this.cssPosition=="fixed"?0:this.cssPosition=="fixed"?-this.scrollParent.scrollLeft():e?0:g.scrollLeft())}},_clear:function(){this.helper.removeClass("ui-draggable-dragging");this.helper[0]!=\nthis.element[0]&&!this.cancelHelperRemoval&&this.helper.remove();this.helper=null;this.cancelHelperRemoval=false},_trigger:function(c,f,g){g=g||this._uiHash();b.ui.plugin.call(this,c,[f,g]);if(c=="drag")this.positionAbs=this._convertPositionTo("absolute");return 
b.Widget.prototype._trigger.call(this,c,f,g)},plugins:{},_uiHash:function(){return{helper:this.helper,position:this.position,originalPosition:this.originalPosition,offset:this.positionAbs}}});b.extend(b.ui.draggable,{version:"1.8.9"});\nb.ui.plugin.add("draggable","connectToSortable",{start:function(c,f){var g=b(this).data("draggable"),e=g.options,a=b.extend({},f,{item:g.element});g.sortables=[];b(e.connectToSortable).each(function(){var d=b.data(this,"sortable");if(d&&!d.options.disabled){g.sortables.push({instance:d,shouldRevert:d.options.revert});d._refreshItems();d._trigger("activate",c,a)}})},stop:function(c,f){var g=b(this).data("draggable"),e=b.extend({},f,{item:g.element});b.each(g.sortables,function(){if(this.instance.isOver){this.instance.isOver=\n0;g.cancelHelperRemoval=true;this.instance.cancelHelperRemoval=false;if(this.shouldRevert)this.instance.options.revert=true;this.instance._mouseStop(c);this.instance.options.helper=this.instance.options._helper;g.options.helper=="original"&&this.instance.currentItem.css({top:"auto",left:"auto"})}else{this.instance.cancelHelperRemoval=false;this.instance._trigger("deactivate",c,e)}})},drag:function(c,f){var g=b(this).data("draggable"),e=this;b.each(g.sortables,function(){this.instance.positionAbs=\ng.positionAbs;this.instance.helperProportions=g.helperProportions;this.instance.offset.click=g.offset.click;if(this.instance._intersectsWith(this.instance.containerCache)){if(!this.instance.isOver){this.instance.isOver=1;this.instance.currentItem=b(e).clone().appendTo(this.instance.element).data("sortable-item",true);this.instance.options._helper=this.instance.options.helper;this.instance.options.helper=function(){return f.helper[0]};c.target=this.instance.currentItem[0];this.instance._mouseCapture(c,\ntrue);this.instance._mouseStart(c,true,true);this.instance.offset.click.top=g.offset.click.top;this.instance.offset.click.left=g.offset.click.left;this.instance.offset.parent.left-=g.offset.parent.left-this.instance.offset.parent.left;this.instance.offset.parent.top-=g.offset.parent.top-this.instance.offset.parent.top;g._trigger("toSortable",c);g.dropped=this.instance.element;g.currentItem=g.element;this.instance.fromOutside=g}this.instance.currentItem&&this.instance._mouseDrag(c)}else if(this.instance.isOver){this.instance.isOver=\n0;this.instance.cancelHelperRemoval=true;this.instance.options.revert=false;this.instance._trigger("out",c,this.instance._uiHash(this.instance));this.instance._mouseStop(c,true);this.instance.options.helper=this.instance.options._helper;this.instance.currentItem.remove();this.instance.placeholder&&this.instance.placeholder.remove();g._trigger("fromSortable",c);g.dropped=false}})}});b.ui.plugin.add("draggable","cursor",{start:function(){var c=b("body"),f=b(this).data("draggable").options;if(c.css("cursor"))f._cursor=\nc.css("cursor");c.css("cursor",f.cursor)},stop:function(){var c=b(this).data("draggable").options;c._cursor&&b("body").css("cursor",c._cursor)}});b.ui.plugin.add("draggable","iframeFix",{start:function(){var c=b(this).data("draggable").options;b(c.iframeFix===true?"iframe":c.iframeFix).each(function(){b(\'<div class="ui-draggable-iframeFix" style="background: 
#fff;"></div>\').css({width:this.offsetWidth+"px",height:this.offsetHeight+"px",position:"absolute",opacity:"0.001",zIndex:1E3}).css(b(this).offset()).appendTo("body")})},\nstop:function(){b("div.ui-draggable-iframeFix").each(function(){this.parentNode.removeChild(this)})}});b.ui.plugin.add("draggable","opacity",{start:function(c,f){c=b(f.helper);f=b(this).data("draggable").options;if(c.css("opacity"))f._opacity=c.css("opacity");c.css("opacity",f.opacity)},stop:function(c,f){c=b(this).data("draggable").options;c._opacity&&b(f.helper).css("opacity",c._opacity)}});b.ui.plugin.add("draggable","scroll",{start:function(){var c=b(this).data("draggable");if(c.scrollParent[0]!=\ndocument&&c.scrollParent[0].tagName!="HTML")c.overflowOffset=c.scrollParent.offset()},drag:function(c){var f=b(this).data("draggable"),g=f.options,e=false;if(f.scrollParent[0]!=document&&f.scrollParent[0].tagName!="HTML"){if(!g.axis||g.axis!="x")if(f.overflowOffset.top+f.scrollParent[0].offsetHeight-c.pageY<g.scrollSensitivity)f.scrollParent[0].scrollTop=e=f.scrollParent[0].scrollTop+g.scrollSpeed;else if(c.pageY-f.overflowOffset.top<g.scrollSensitivity)f.scrollParent[0].scrollTop=e=f.scrollParent[0].scrollTop-\ng.scrollSpeed;if(!g.axis||g.axis!="y")if(f.overflowOffset.left+f.scrollParent[0].offsetWidth-c.pageX<g.scrollSensitivity)f.scrollParent[0].scrollLeft=e=f.scrollParent[0].scrollLeft+g.scrollSpeed;else if(c.pageX-f.overflowOffset.left<g.scrollSensitivity)f.scrollParent[0].scrollLeft=e=f.scrollParent[0].scrollLeft-g.scrollSpeed}else{if(!g.axis||g.axis!="x")if(c.pageY-b(document).scrollTop()<g.scrollSensitivity)e=b(document).scrollTop(b(document).scrollTop()-g.scrollSpeed);else if(b(window).height()-\n(c.pageY-b(document).scrollTop())<g.scrollSensitivity)e=b(document).scrollTop(b(document).scrollTop()+g.scrollSpeed);if(!g.axis||g.axis!="y")if(c.pageX-b(document).scrollLeft()<g.scrollSensitivity)e=b(document).scrollLeft(b(document).scrollLeft()-g.scrollSpeed);else if(b(window).width()-(c.pageX-b(document).scrollLeft())<g.scrollSensitivity)e=b(document).scrollLeft(b(document).scrollLeft()+g.scrollSpeed)}e!==false&&b.ui.ddmanager&&!g.dropBehaviour&&b.ui.ddmanager.prepareOffsets(f,c)}});b.ui.plugin.add("draggable",\n"snap",{start:function(){var c=b(this).data("draggable"),f=c.options;c.snapElements=[];b(f.snap.constructor!=String?f.snap.items||":data(draggable)":f.snap).each(function(){var g=b(this),e=g.offset();this!=c.element[0]&&c.snapElements.push({item:this,width:g.outerWidth(),height:g.outerHeight(),top:e.top,left:e.left})})},drag:function(c,f){for(var g=b(this).data("draggable"),e=g.options,a=e.snapTolerance,d=f.offset.left,h=d+g.helperProportions.width,i=f.offset.top,j=i+g.helperProportions.height,n=\ng.snapElements.length-1;n>=0;n--){var q=g.snapElements[n].left,l=q+g.snapElements[n].width,k=g.snapElements[n].top,m=k+g.snapElements[n].height;if(q-a<d&&d<l+a&&k-a<i&&i<m+a||q-a<d&&d<l+a&&k-a<j&&j<m+a||q-a<h&&h<l+a&&k-a<i&&i<m+a||q-a<h&&h<l+a&&k-a<j&&j<m+a){if(e.snapMode!="inner"){var o=Math.abs(k-j)<=a,p=Math.abs(m-i)<=a,s=Math.abs(q-h)<=a,r=Math.abs(l-d)<=a;if(o)f.position.top=g._convertPositionTo("relative",{top:k-g.helperProportions.height,left:0}).top-g.margins.top;if(p)f.position.top=g._convertPositionTo("relative",\n{top:m,left:0}).top-g.margins.top;if(s)f.position.left=g._convertPositionTo("relative",{top:0,left:q-g.helperProportions.width}).left-g.margins.left;if(r)f.position.left=g._convertPositionTo("relative",{top:0,left:l}).left-g.margins.left}var 
u=o||p||s||r;if(e.snapMode!="outer"){o=Math.abs(k-i)<=a;p=Math.abs(m-j)<=a;s=Math.abs(q-d)<=a;r=Math.abs(l-h)<=a;if(o)f.position.top=g._convertPositionTo("relative",{top:k,left:0}).top-g.margins.top;if(p)f.position.top=g._convertPositionTo("relative",{top:m-g.helperProportions.height,\nleft:0}).top-g.margins.top;if(s)f.position.left=g._convertPositionTo("relative",{top:0,left:q}).left-g.margins.left;if(r)f.position.left=g._convertPositionTo("relative",{top:0,left:l-g.helperProportions.width}).left-g.margins.left}if(!g.snapElements[n].snapping&&(o||p||s||r||u))g.options.snap.snap&&g.options.snap.snap.call(g.element,c,b.extend(g._uiHash(),{snapItem:g.snapElements[n].item}));g.snapElements[n].snapping=o||p||s||r||u}else{g.snapElements[n].snapping&&g.options.snap.release&&g.options.snap.release.call(g.element,\nc,b.extend(g._uiHash(),{snapItem:g.snapElements[n].item}));g.snapElements[n].snapping=false}}}});b.ui.plugin.add("draggable","stack",{start:function(){var c=b(this).data("draggable").options;c=b.makeArray(b(c.stack)).sort(function(g,e){return(parseInt(b(g).css("zIndex"),10)||0)-(parseInt(b(e).css("zIndex"),10)||0)});if(c.length){var f=parseInt(c[0].style.zIndex)||0;b(c).each(function(g){this.style.zIndex=f+g});this[0].style.zIndex=f+c.length}}});b.ui.plugin.add("draggable","zIndex",{start:function(c,\nf){c=b(f.helper);f=b(this).data("draggable").options;if(c.css("zIndex"))f._zIndex=c.css("zIndex");c.css("zIndex",f.zIndex)},stop:function(c,f){c=b(this).data("draggable").options;c._zIndex&&b(f.helper).css("zIndex",c._zIndex)}})})(jQuery);\n(function(b){b.widget("ui.droppable",{widgetEventPrefix:"drop",options:{accept:"*",activeClass:false,addClasses:true,greedy:false,hoverClass:false,scope:"default",tolerance:"intersect"},_create:function(){var c=this.options,f=c.accept;this.isover=0;this.isout=1;this.accept=b.isFunction(f)?f:function(g){return g.is(f)};this.proportions={width:this.element[0].offsetWidth,height:this.element[0].offsetHeight};b.ui.ddmanager.droppables[c.scope]=b.ui.ddmanager.droppables[c.scope]||[];b.ui.ddmanager.droppables[c.scope].push(this);\nc.addClasses&&this.element.addClass("ui-droppable")},destroy:function(){for(var c=b.ui.ddmanager.droppables[this.options.scope],f=0;f<c.length;f++)c[f]==this&&c.splice(f,1);this.element.removeClass("ui-droppable ui-droppable-disabled").removeData("droppable").unbind(".droppable");return this},_setOption:function(c,f){if(c=="accept")this.accept=b.isFunction(f)?f:function(g){return g.is(f)};b.Widget.prototype._setOption.apply(this,arguments)},_activate:function(c){var f=b.ui.ddmanager.current;this.options.activeClass&&\nthis.element.addClass(this.options.activeClass);f&&this._trigger("activate",c,this.ui(f))},_deactivate:function(c){var f=b.ui.ddmanager.current;this.options.activeClass&&this.element.removeClass(this.options.activeClass);f&&this._trigger("deactivate",c,this.ui(f))},_over:function(c){var f=b.ui.ddmanager.current;if(!(!f||(f.currentItem||f.element)[0]==this.element[0]))if(this.accept.call(this.element[0],f.currentItem||f.element)){this.options.hoverClass&&this.element.addClass(this.options.hoverClass);\nthis._trigger("over",c,this.ui(f))}},_out:function(c){var f=b.ui.ddmanager.current;if(!(!f||(f.currentItem||f.element)[0]==this.element[0]))if(this.accept.call(this.element[0],f.currentItem||f.element)){this.options.hoverClass&&this.element.removeClass(this.options.hoverClass);this._trigger("out",c,this.ui(f))}},_drop:function(c,f){var 
g=f||b.ui.ddmanager.current;if(!g||(g.currentItem||g.element)[0]==this.element[0])return false;var e=false;this.element.find(":data(droppable)").not(".ui-draggable-dragging").each(function(){var a=\nb.data(this,"droppable");if(a.options.greedy&&!a.options.disabled&&a.options.scope==g.options.scope&&a.accept.call(a.element[0],g.currentItem||g.element)&&b.ui.intersect(g,b.extend(a,{offset:a.element.offset()}),a.options.tolerance)){e=true;return false}});if(e)return false;if(this.accept.call(this.element[0],g.currentItem||g.element)){this.options.activeClass&&this.element.removeClass(this.options.activeClass);this.options.hoverClass&&this.element.removeClass(this.options.hoverClass);this._trigger("drop",\nc,this.ui(g));return this.element}return false},ui:function(c){return{draggable:c.currentItem||c.element,helper:c.helper,position:c.position,offset:c.positionAbs}}});b.extend(b.ui.droppable,{version:"1.8.9"});b.ui.intersect=function(c,f,g){if(!f.offset)return false;var e=(c.positionAbs||c.position.absolute).left,a=e+c.helperProportions.width,d=(c.positionAbs||c.position.absolute).top,h=d+c.helperProportions.height,i=f.offset.left,j=i+f.proportions.width,n=f.offset.top,q=n+f.proportions.height;\nswitch(g){case "fit":return i<=e&&a<=j&&n<=d&&h<=q;case "intersect":return i<e+c.helperProportions.width/2&&a-c.helperProportions.width/2<j&&n<d+c.helperProportions.height/2&&h-c.helperProportions.height/2<q;case "pointer":return b.ui.isOver((c.positionAbs||c.position.absolute).top+(c.clickOffset||c.offset.click).top,(c.positionAbs||c.position.absolute).left+(c.clickOffset||c.offset.click).left,n,i,f.proportions.height,f.proportions.width);case "touch":return(d>=n&&d<=q||h>=n&&h<=q||d<n&&h>q)&&(e>=\ni&&e<=j||a>=i&&a<=j||e<i&&a>j);default:return false}};b.ui.ddmanager={current:null,droppables:{"default":[]},prepareOffsets:function(c,f){var g=b.ui.ddmanager.droppables[c.options.scope]||[],e=f?f.type:null,a=(c.currentItem||c.element).find(":data(droppable)").andSelf(),d=0;a:for(;d<g.length;d++)if(!(g[d].options.disabled||c&&!g[d].accept.call(g[d].element[0],c.currentItem||c.element))){for(var h=0;h<a.length;h++)if(a[h]==g[d].element[0]){g[d].proportions.height=0;continue a}g[d].visible=g[d].element.css("display")!=\n"none";if(g[d].visible){g[d].offset=g[d].element.offset();g[d].proportions={width:g[d].element[0].offsetWidth,height:g[d].element[0].offsetHeight};e=="mousedown"&&g[d]._activate.call(g[d],f)}}},drop:function(c,f){var g=false;b.each(b.ui.ddmanager.droppables[c.options.scope]||[],function(){if(this.options){if(!this.options.disabled&&this.visible&&b.ui.intersect(c,this,this.options.tolerance))g=g||this._drop.call(this,f);if(!this.options.disabled&&this.visible&&this.accept.call(this.element[0],c.currentItem||\nc.element)){this.isout=1;this.isover=0;this._deactivate.call(this,f)}}});return g},drag:function(c,f){c.options.refreshPositions&&b.ui.ddmanager.prepareOffsets(c,f);b.each(b.ui.ddmanager.droppables[c.options.scope]||[],function(){if(!(this.options.disabled||this.greedyChild||!this.visible)){var g=b.ui.intersect(c,this,this.options.tolerance);if(g=!g&&this.isover==1?"isout":g&&this.isover==0?"isover":null){var e;if(this.options.greedy){var 
c=this.options,f;if(c.fillSpace){if(b.browser.msie){var g=this.element.parent().css("overflow");this.element.parent().css("overflow","hidden")}f=this.element.parent().height();b.browser.msie&&this.element.parent().css("overflow",g);this.headers.each(function(){f-=b(this).outerHeight(true)});this.headers.next().each(function(){b(this).height(Math.max(0,f-b(this).innerHeight()+\nb(this).height()))}).css("overflow","auto")}else if(c.autoHeight){f=0;this.headers.next().each(function(){f=Math.max(f,b(this).height("").height())}).height(f)}return this},activate:function(c){this.options.active=c;c=this._findActive(c)[0];this._clickHandler({target:c},c);return this},_findActive:function(c){return c?typeof c==="number"?this.headers.filter(":eq("+c+")"):this.headers.not(this.headers.not(c)):c===false?b([]):this.headers.filter(":eq(0)")},_clickHandler:function(c,f){var g=this.options;\nif(!g.disabled)if(c.target){c=b(c.currentTarget||f);f=c[0]===this.active[0];g.active=g.collapsible&&f?false:this.headers.index(c);if(!(this.running||!g.collapsible&&f)){var e=this.active;i=c.next();d=this.active.next();h={options:g,newHeader:f&&g.collapsible?b([]):c,oldHeader:this.active,newContent:f&&g.collapsible?b([]):i,oldContent:d};var a=this.headers.index(this.active[0])>this.headers.index(c[0]);this.active=f?b([]):c;this._toggle(i,d,h,f,a);e.removeClass("ui-state-active ui-corner-top").addClass("ui-state-default ui-corner-all").children(".ui-icon").removeClass(g.icons.headerSelected).addClass(g.icons.header);\nif(!f){c.removeClass("ui-state-default ui-corner-all").addClass("ui-state-active ui-corner-top").children(".ui-icon").removeClass(g.icons.header).addClass(g.icons.headerSelected);c.next().addClass("ui-accordion-content-active")}}}else if(g.collapsible){this.active.removeClass("ui-state-active ui-corner-top").addClass("ui-state-default ui-corner-all").children(".ui-icon").removeClass(g.icons.headerSelected).addClass(g.icons.header);this.active.next().addClass("ui-accordion-content-active");var d=this.active.next(),\nh={options:g,newHeader:b([]),oldHeader:g.active,newContent:b([]),oldContent:d},i=this.active=b([]);this._toggle(i,d,h)}},_toggle:function(c,f,g,e,a){var d=this,h=d.options;d.toShow=c;d.toHide=f;d.data=g;var i=function(){if(d)return d._completed.apply(d,arguments)};d._trigger("changestart",null,d.data);d.running=f.size()===0?c.size():f.size();if(h.animated){g={};g=h.collapsible&&e?{toShow:b([]),toHide:f,complete:i,down:a,autoHeight:h.autoHeight||h.fillSpace}:{toShow:c,toHide:f,complete:i,down:a,autoHeight:h.autoHeight||\nh.fillSpace};if(!h.proxied)h.proxied=h.animated;if(!h.proxiedDuration)h.proxiedDuration=h.duration;h.animated=b.isFunction(h.proxied)?h.proxied(g):h.proxied;h.duration=b.isFunction(h.proxiedDuration)?h.proxiedDuration(g):h.proxiedDuration;e=b.ui.accordion.animations;var 
j=h.duration,n=h.animated;if(n&&!e[n]&&!b.easing[n])n="slide";e[n]||(e[n]=function(q){this.slide(q,{easing:n,duration:j||700})});e[n](g)}else{if(h.collapsible&&e)c.toggle();else{f.hide();c.show()}i(true)}f.prev().attr({"aria-expanded":"false",\ntabIndex:-1}).blur();c.prev().attr({"aria-expanded":"true",tabIndex:0}).focus()},_completed:function(c){this.running=c?0:--this.running;if(!this.running){this.options.clearStyle&&this.toShow.add(this.toHide).css({height:"",overflow:""});this.toHide.removeClass("ui-accordion-content-active");if(this.toHide.length)this.toHide.parent()[0].className=this.toHide.parent()[0].className;this._trigger("change",null,this.data)}}});b.extend(b.ui.accordion,{version:"1.8.9",animations:{slide:function(c,f){c=\nb.extend({easing:"swing",duration:300},c,f);if(c.toHide.size())if(c.toShow.size()){var g=c.toShow.css("overflow"),e=0,a={},d={},h;f=c.toShow;h=f[0].style.width;f.width(parseInt(f.parent().width(),10)-parseInt(f.css("paddingLeft"),10)-parseInt(f.css("paddingRight"),10)-(parseInt(f.css("borderLeftWidth"),10)||0)-(parseInt(f.css("borderRightWidth"),10)||0));b.each(["height","paddingTop","paddingBottom"],function(i,j){d[j]="hide";i=(""+b.css(c.toShow[0],j)).match(/^([\\d+-.]+)(.*)$/);a[j]={value:i[1],\nunit:i[2]||"px"}});c.toShow.css({height:0,overflow:"hidden"}).show();c.toHide.filter(":hidden").each(c.complete).end().filter(":visible").animate(d,{step:function(i,j){if(j.prop=="height")e=j.end-j.start===0?0:(j.now-j.start)/(j.end-j.start);c.toShow[0].style[j.prop]=e*a[j.prop].value+a[j.prop].unit},duration:c.duration,easing:c.easing,complete:function(){c.autoHeight||c.toShow.css("height","");c.toShow.css({width:h,overflow:g});c.complete()}})}else c.toHide.animate({height:"hide",paddingTop:"hide",\npaddingBottom:"hide"},c);else c.toShow.animate({height:"show",paddingTop:"show",paddingBottom:"show"},c)},bounceslide:function(c){this.slide(c,{easing:c.down?"easeOutBounce":"swing",duration:c.down?1E3:200})}}})})(jQuery);\n(function(b){b.widget("ui.autocomplete",{options:{appendTo:"body",delay:300,minLength:1,position:{my:"left top",at:"left bottom",collision:"none"},source:null},pending:0,_create:function(){var c=this,f=this.element[0].ownerDocument,g;this.element.addClass("ui-autocomplete-input").attr("autocomplete","off").attr({role:"textbox","aria-autocomplete":"list","aria-haspopup":"true"}).bind("keydown.autocomplete",function(e){if(!(c.options.disabled||c.element.attr("readonly"))){g=false;var a=b.ui.keyCode;\nswitch(e.keyCode){case a.PAGE_UP:c._move("previousPage",e);break;case a.PAGE_DOWN:c._move("nextPage",e);break;case a.UP:c._move("previous",e);e.preventDefault();break;case a.DOWN:c._move("next",e);e.preventDefault();break;case a.ENTER:case a.NUMPAD_ENTER:if(c.menu.active){g=true;e.preventDefault()}case a.TAB:if(!c.menu.active)return;c.menu.select(e);break;case a.ESCAPE:c.element.val(c.term);c.close(e);break;default:clearTimeout(c.searching);c.searching=setTimeout(function(){if(c.term!=c.element.val()){c.selectedItem=\nnull;c.search(null,e)}},c.options.delay);break}}}).bind("keypress.autocomplete",function(e){if(g){g=false;e.preventDefault()}}).bind("focus.autocomplete",function(){if(!c.options.disabled){c.selectedItem=null;c.previous=c.element.val()}}).bind("blur.autocomplete",function(e){if(!c.options.disabled){clearTimeout(c.searching);c.closing=setTimeout(function(){c.close(e);c._change(e)},150)}});this._initSource();this.response=function(){return 
c._response.apply(c,arguments)};this.menu=b("<ul></ul>").addClass("ui-autocomplete").appendTo(b(this.options.appendTo||\n"body",f)[0]).mousedown(function(e){var a=c.menu.element[0];b(e.target).closest(".ui-menu-item").length||setTimeout(function(){b(document).one("mousedown",function(d){d.target!==c.element[0]&&d.target!==a&&!b.ui.contains(a,d.target)&&c.close()})},1);setTimeout(function(){clearTimeout(c.closing)},13)}).menu({focus:function(e,a){a=a.item.data("item.autocomplete");false!==c._trigger("focus",e,{item:a})&&/^key/.test(e.originalEvent.type)&&c.element.val(a.value)},selected:function(e,a){var d=a.item.data("item.autocomplete"),\nh=c.previous;if(c.element[0]!==f.activeElement){c.element.focus();c.previous=h;setTimeout(function(){c.previous=h;c.selectedItem=d},1)}false!==c._trigger("select",e,{item:d})&&c.element.val(d.value);c.term=c.element.val();c.close(e);c.selectedItem=d},blur:function(){c.menu.element.is(":visible")&&c.element.val()!==c.term&&c.element.val(c.term)}}).zIndex(this.element.zIndex()+1).css({top:0,left:0}).hide().data("menu");b.fn.bgiframe&&this.menu.element.bgiframe()},destroy:function(){this.element.removeClass("ui-autocomplete-input").removeAttr("autocomplete").removeAttr("role").removeAttr("aria-autocomplete").removeAttr("aria-haspopup");\nthis.menu.element.remove();b.Widget.prototype.destroy.call(this)},_setOption:function(c,f){b.Widget.prototype._setOption.apply(this,arguments);c==="source"&&this._initSource();if(c==="appendTo")this.menu.element.appendTo(b(f||"body",this.element[0].ownerDocument)[0]);c==="disabled"&&f&&this.xhr&&this.xhr.abort()},_initSource:function(){var c=this,f,g;if(b.isArray(this.options.source)){f=this.options.source;this.source=function(e,a){a(b.ui.autocomplete.filter(f,e.term))}}else if(typeof this.options.source===\n"string"){g=this.options.source;this.source=function(e,a){c.xhr&&c.xhr.abort();c.xhr=b.ajax({url:g,data:e,dataType:"json",success:function(d,h,i){i===c.xhr&&a(d);c.xhr=null},error:function(d){d===c.xhr&&a([]);c.xhr=null}})}}else this.source=this.options.source},search:function(c,f){c=c!=null?c:this.element.val();this.term=this.element.val();if(c.length<this.options.minLength)return this.close(f);clearTimeout(this.closing);if(this._trigger("search",f)!==false)return this._search(c)},_search:function(c){this.pending++;\nthis.element.addClass("ui-autocomplete-loading");this.source({term:c},this.response)},_response:function(c){if(!this.options.disabled&&c&&c.length){c=this._normalize(c);this._suggest(c);this._trigger("open")}else this.close();this.pending--;this.pending||this.element.removeClass("ui-autocomplete-loading")},close:function(c){clearTimeout(this.closing);if(this.menu.element.is(":visible")){this.menu.element.hide();this.menu.deactivate();this._trigger("close",c)}},_change:function(c){this.previous!==\nthis.element.val()&&this._trigger("change",c,{item:this.selectedItem})},_normalize:function(c){if(c.length&&c[0].label&&c[0].value)return c;return b.map(c,function(f){if(typeof f==="string")return{label:f,value:f};return b.extend({label:f.label||f.value,value:f.value||f.label},f)})},_suggest:function(c){var f=this.menu.element.empty().zIndex(this.element.zIndex()+1);this._renderMenu(f,c);this.menu.deactivate();this.menu.refresh();f.show();this._resizeMenu();f.position(b.extend({of:this.element},this.options.position))},\n_resizeMenu:function(){var c=this.menu.element;c.outerWidth(Math.max(c.width("").outerWidth(),this.element.outerWidth()))},_renderMenu:function(c,f){var 
g=this;b.each(f,function(e,a){g._renderItem(c,a)})},_renderItem:function(c,f){return b("<li></li>").data("item.autocomplete",f).append(b("<a></a>").text(f.label)).appendTo(c)},_move:function(c,f){if(this.menu.element.is(":visible"))if(this.menu.first()&&/^previous/.test(c)||this.menu.last()&&/^next/.test(c)){this.element.val(this.term);this.menu.deactivate()}else this.menu[c](f);\nelse this.search(null,f)},widget:function(){return this.menu.element}});b.extend(b.ui.autocomplete,{escapeRegex:function(c){return c.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g,"\\\\$&")},filter:function(c,f){var g=new RegExp(b.ui.autocomplete.escapeRegex(f),"i");return b.grep(c,function(e){return g.test(e.label||e.value||e)})}})})(jQuery);\n(function(b){b.widget("ui.menu",{_create:function(){var c=this;this.element.addClass("ui-menu ui-widget ui-widget-content ui-corner-all").attr({role:"listbox","aria-activedescendant":"ui-active-menuitem"}).click(function(f){if(b(f.target).closest(".ui-menu-item a").length){f.preventDefault();c.select(f)}});this.refresh()},refresh:function(){var c=this;this.element.children("li:not(.ui-menu-item):has(a)").addClass("ui-menu-item").attr("role","menuitem").children("a").addClass("ui-corner-all").attr("tabindex",\n-1).mouseenter(function(f){c.activate(f,b(this).parent())}).mouseleave(function(){c.deactivate()})},activate:function(c,f){this.deactivate();if(this.hasScroll()){var g=f.offset().top-this.element.offset().top,e=this.element.attr("scrollTop"),a=this.element.height();if(g<0)this.element.attr("scrollTop",e+g);else g>=a&&this.element.attr("scrollTop",e+g-a+f.height())}this.active=f.eq(0).children("a").addClass("ui-state-hover").attr("id","ui-active-menuitem").end();this._trigger("focus",c,{item:f})},\ndeactivate:function(){if(this.active){this.active.children("a").removeClass("ui-state-hover").removeAttr("id");this._trigger("blur");this.active=null}},next:function(c){this.move("next",".ui-menu-item:first",c)},previous:function(c){this.move("prev",".ui-menu-item:last",c)},first:function(){return this.active&&!this.active.prevAll(".ui-menu-item").length},last:function(){return this.active&&!this.active.nextAll(".ui-menu-item").length},move:function(c,f,g){if(this.active){c=this.active[c+"All"](".ui-menu-item").eq(0);\nc.length?this.activate(g,c):this.activate(g,this.element.children(f))}else this.activate(g,this.element.children(f))},nextPage:function(c){if(this.hasScroll())if(!this.active||this.last())this.activate(c,this.element.children(".ui-menu-item:first"));else{var f=this.active.offset().top,g=this.element.height(),e=this.element.children(".ui-menu-item").filter(function(){var a=b(this).offset().top-f-g+b(this).height();return a<10&&a>-10});e.length||(e=this.element.children(".ui-menu-item:last"));this.activate(c,\ne)}else this.activate(c,this.element.children(".ui-menu-item").filter(!this.active||this.last()?":first":":last"))},previousPage:function(c){if(this.hasScroll())if(!this.active||this.first())this.activate(c,this.element.children(".ui-menu-item:last"));else{var f=this.active.offset().top,g=this.element.height();result=this.element.children(".ui-menu-item").filter(function(){var e=b(this).offset().top-f+g-b(this).height();return e<10&&e>-10});result.length||(result=this.element.children(".ui-menu-item:first"));\nthis.activate(c,result)}else this.activate(c,this.element.children(".ui-menu-item").filter(!this.active||this.first()?":last":":first"))},hasScroll:function(){return 
this.element.height()<this.element.attr("scrollHeight")},select:function(c){this._trigger("selected",c,{item:this.active})}})})(jQuery);\n(function(b){var c,f=function(e){b(":ui-button",e.target.form).each(function(){var a=b(this).data("button");setTimeout(function(){a.refresh()},1)})},g=function(e){var a=e.name,d=e.form,h=b([]);if(a)h=d?b(d).find("[name=\'"+a+"\']"):b("[name=\'"+a+"\']",e.ownerDocument).filter(function(){return!this.form});return h};b.widget("ui.button",{options:{disabled:null,text:true,label:null,icons:{primary:null,secondary:null}},_create:function(){this.element.closest("form").unbind("reset.button").bind("reset.button",\nf);if(typeof this.options.disabled!=="boolean")this.options.disabled=this.element.attr("disabled");this._determineButtonType();this.hasTitle=!!this.buttonElement.attr("title");var e=this,a=this.options,d=this.type==="checkbox"||this.type==="radio",h="ui-state-hover"+(!d?" ui-state-active":"");if(a.label===null)a.label=this.buttonElement.html();if(this.element.is(":disabled"))a.disabled=true;this.buttonElement.addClass("ui-button ui-widget ui-state-default ui-corner-all").attr("role","button").bind("mouseenter.button",\nfunction(){if(!a.disabled){b(this).addClass("ui-state-hover");this===c&&b(this).addClass("ui-state-active")}}).bind("mouseleave.button",function(){a.disabled||b(this).removeClass(h)}).bind("focus.button",function(){b(this).addClass("ui-state-focus")}).bind("blur.button",function(){b(this).removeClass("ui-state-focus")});d&&this.element.bind("change.button",function(){e.refresh()});if(this.type==="checkbox")this.buttonElement.bind("click.button",function(){if(a.disabled)return false;b(this).toggleClass("ui-state-active");\ne.buttonElement.attr("aria-pressed",e.element[0].checked)});else if(this.type==="radio")this.buttonElement.bind("click.button",function(){if(a.disabled)return false;b(this).addClass("ui-state-active");e.buttonElement.attr("aria-pressed",true);var i=e.element[0];g(i).not(i).map(function(){return b(this).button("widget")[0]}).removeClass("ui-state-active").attr("aria-pressed",false)});else{this.buttonElement.bind("mousedown.button",function(){if(a.disabled)return false;b(this).addClass("ui-state-active");\nc=this;b(document).one("mouseup",function(){c=null})}).bind("mouseup.button",function(){if(a.disabled)return false;b(this).removeClass("ui-state-active")}).bind("keydown.button",function(i){if(a.disabled)return false;if(i.keyCode==b.ui.keyCode.SPACE||i.keyCode==b.ui.keyCode.ENTER)b(this).addClass("ui-state-active")}).bind("keyup.button",function(){b(this).removeClass("ui-state-active")});this.buttonElement.is("a")&&this.buttonElement.keyup(function(i){i.keyCode===b.ui.keyCode.SPACE&&b(this).click()})}this._setOption("disabled",\na.disabled)},_determineButtonType:function(){this.type=this.element.is(":checkbox")?"checkbox":this.element.is(":radio")?"radio":this.element.is("input")?"input":"button";if(this.type==="checkbox"||this.type==="radio"){this.buttonElement=this.element.parents().last().find("label[for="+this.element.attr("id")+"]");this.element.addClass("ui-helper-hidden-accessible");var e=this.element.is(":checked");e&&this.buttonElement.addClass("ui-state-active");this.buttonElement.attr("aria-pressed",e)}else this.buttonElement=\nthis.element},widget:function(){return this.buttonElement},destroy:function(){this.element.removeClass("ui-helper-hidden-accessible");this.buttonElement.removeClass("ui-button ui-widget ui-state-default ui-corner-all ui-state-hover ui-state-active ui-button-icons-only 
ui-button-icon-only ui-button-text-icons ui-button-text-icon-primary ui-button-text-icon-secondary ui-button-text-only").removeAttr("role").removeAttr("aria-pressed").html(this.buttonElement.find(".ui-button-text").html());this.hasTitle||\nthis.buttonElement.removeAttr("title");b.Widget.prototype.destroy.call(this)},_setOption:function(e,a){b.Widget.prototype._setOption.apply(this,arguments);if(e==="disabled")a?this.element.attr("disabled",true):this.element.removeAttr("disabled");this._resetButton()},refresh:function(){var e=this.element.is(":disabled");e!==this.options.disabled&&this._setOption("disabled",e);if(this.type==="radio")g(this.element[0]).each(function(){b(this).is(":checked")?b(this).button("widget").addClass("ui-state-active").attr("aria-pressed",\ntrue):b(this).button("widget").removeClass("ui-state-active").attr("aria-pressed",false)});else if(this.type==="checkbox")this.element.is(":checked")?this.buttonElement.addClass("ui-state-active").attr("aria-pressed",true):this.buttonElement.removeClass("ui-state-active").attr("aria-pressed",false)},_resetButton:function(){if(this.type==="input")this.options.label&&this.element.val(this.options.label);else{var e=this.buttonElement.removeClass("ui-button-icons-only ui-button-icon-only ui-button-text-icons ui-button-text-icon-primary ui-button-text-icon-secondary ui-button-text-only"),\na=b("<span></span>").addClass("ui-button-text").html(this.options.label).appendTo(e.empty()).text(),d=this.options.icons,h=d.primary&&d.secondary;if(d.primary||d.secondary){e.addClass("ui-button-text-icon"+(h?"s":d.primary?"-primary":"-secondary"));d.primary&&e.prepend("<span class=\'ui-button-icon-primary ui-icon "+d.primary+"\'></span>");d.secondary&&e.append("<span class=\'ui-button-icon-secondary ui-icon "+d.secondary+"\'></span>");if(!this.options.text){e.addClass(h?"ui-button-icons-only":"ui-button-icon-only").removeClass("ui-button-text-icons ui-button-text-icon-primary ui-button-text-icon-secondary");\nthis.hasTitle||e.attr("title",a)}}else e.addClass("ui-button-text-only")}}});b.widget("ui.buttonset",{options:{items:":button, :submit, :reset, :checkbox, :radio, a, :data(button)"},_create:function(){this.element.addClass("ui-buttonset")},_init:function(){this.refresh()},_setOption:function(e,a){e==="disabled"&&this.buttons.button("option",e,a);b.Widget.prototype._setOption.apply(this,arguments)},refresh:function(){this.buttons=this.element.find(this.options.items).filter(":ui-button").button("refresh").end().not(":ui-button").button().end().map(function(){return b(this).button("widget")[0]}).removeClass("ui-corner-all ui-corner-left ui-corner-right").filter(":first").addClass("ui-corner-left").end().filter(":last").addClass("ui-corner-right").end().end()},\ndestroy:function(){this.element.removeClass("ui-buttonset");this.buttons.map(function(){return b(this).button("widget")[0]}).removeClass("ui-corner-left ui-corner-right").end().button("destroy");b.Widget.prototype.destroy.call(this)}})})(jQuery);\n(function(b,c){function 
f(){this.debug=false;this._curInst=null;this._keyEvent=false;this._disabledInputs=[];this._inDialog=this._datepickerShowing=false;this._mainDivId="ui-datepicker-div";this._inlineClass="ui-datepicker-inline";this._appendClass="ui-datepicker-append";this._triggerClass="ui-datepicker-trigger";this._dialogClass="ui-datepicker-dialog";this._disableClass="ui-datepicker-disabled";this._unselectableClass="ui-datepicker-unselectable";this._currentClass="ui-datepicker-current-day";this._dayOverClass=\n"ui-datepicker-days-cell-over";this.regional=[];this.regional[""]={closeText:"Done",prevText:"Prev",nextText:"Next",currentText:"Today",monthNames:["January","February","March","April","May","June","July","August","September","October","November","December"],monthNamesShort:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],dayNames:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],dayNamesShort:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],dayNamesMin:["Su",\n"Mo","Tu","We","Th","Fr","Sa"],weekHeader:"Wk",dateFormat:"mm/dd/yy",firstDay:0,isRTL:false,showMonthAfterYear:false,yearSuffix:""};this._defaults={showOn:"focus",showAnim:"fadeIn",showOptions:{},defaultDate:null,appendText:"",buttonText:"...",buttonImage:"",buttonImageOnly:false,hideIfNoPrevNext:false,navigationAsDateFormat:false,gotoCurrent:false,changeMonth:false,changeYear:false,yearRange:"c-10:c+10",showOtherMonths:false,selectOtherMonths:false,showWeek:false,calculateWeek:this.iso8601Week,shortYearCutoff:"+10",\nminDate:null,maxDate:null,duration:"fast",beforeShowDay:null,beforeShow:null,onSelect:null,onChangeMonthYear:null,onClose:null,numberOfMonths:1,showCurrentAtPos:0,stepMonths:1,stepBigMonths:12,altField:"",altFormat:"",constrainInput:true,showButtonPanel:false,autoSize:false};b.extend(this._defaults,this.regional[""]);this.dpDiv=b(\'<div id="\'+this._mainDivId+\'" class="ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all"></div>\')}function g(a,d){b.extend(a,d);for(var h in d)if(d[h]==\nnull||d[h]==c)a[h]=d[h];return a}b.extend(b.ui,{datepicker:{version:"1.8.9"}});var e=(new Date).getTime();b.extend(f.prototype,{markerClassName:"hasDatepicker",log:function(){this.debug&&console.log.apply("",arguments)},_widgetDatepicker:function(){return this.dpDiv},setDefaults:function(a){g(this._defaults,a||{});return this},_attachDatepicker:function(a,d){var h=null;for(var i in this._defaults){var j=a.getAttribute("date:"+i);if(j){h=h||{};try{h[i]=eval(j)}catch(n){h[i]=j}}}i=a.nodeName.toLowerCase();\nj=i=="div"||i=="span";if(!a.id){this.uuid+=1;a.id="dp"+this.uuid}var q=this._newInst(b(a),j);q.settings=b.extend({},d||{},h||{});if(i=="input")this._connectDatepicker(a,q);else j&&this._inlineDatepicker(a,q)},_newInst:function(a,d){return{id:a[0].id.replace(/([^A-Za-z0-9_-])/g,"\\\\\\\\$1"),input:a,selectedDay:0,selectedMonth:0,selectedYear:0,drawMonth:0,drawYear:0,inline:d,dpDiv:!d?this.dpDiv:b(\'<div class="\'+this._inlineClass+\' ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all"></div>\')}},\n_connectDatepicker:function(a,d){var h=b(a);d.append=b([]);d.trigger=b([]);if(!h.hasClass(this.markerClassName)){this._attachments(h,d);h.addClass(this.markerClassName).keydown(this._doKeyDown).keypress(this._doKeyPress).keyup(this._doKeyUp).bind("setData.datepicker",function(i,j,n){d.settings[j]=n}).bind("getData.datepicker",function(i,j){return this._get(d,j)});this._autoSize(d);b.data(a,"datepicker",d)}},_attachments:function(a,d){var 
h=this._get(d,"appendText"),i=this._get(d,"isRTL");d.append&&\nd.append.remove();if(h){d.append=b(\'<span class="\'+this._appendClass+\'">\'+h+"</span>");a[i?"before":"after"](d.append)}a.unbind("focus",this._showDatepicker);d.trigger&&d.trigger.remove();h=this._get(d,"showOn");if(h=="focus"||h=="both")a.focus(this._showDatepicker);if(h=="button"||h=="both"){h=this._get(d,"buttonText");var j=this._get(d,"buttonImage");d.trigger=b(this._get(d,"buttonImageOnly")?b("<img/>").addClass(this._triggerClass).attr({src:j,alt:h,title:h}):b(\'<button type="button"></button>\').addClass(this._triggerClass).html(j==\n""?h:b("<img/>").attr({src:j,alt:h,title:h})));a[i?"before":"after"](d.trigger);d.trigger.click(function(){b.datepicker._datepickerShowing&&b.datepicker._lastInput==a[0]?b.datepicker._hideDatepicker():b.datepicker._showDatepicker(a[0]);return false})}},_autoSize:function(a){if(this._get(a,"autoSize")&&!a.inline){var d=new Date(2009,11,20),h=this._get(a,"dateFormat");if(h.match(/[DM]/)){var i=function(j){for(var n=0,q=0,l=0;l<j.length;l++)if(j[l].length>n){n=j[l].length;q=l}return q};d.setMonth(i(this._get(a,\nh.match(/MM/)?"monthNames":"monthNamesShort")));d.setDate(i(this._get(a,h.match(/DD/)?"dayNames":"dayNamesShort"))+20-d.getDay())}a.input.attr("size",this._formatDate(a,d).length)}},_inlineDatepicker:function(a,d){var h=b(a);if(!h.hasClass(this.markerClassName)){h.addClass(this.markerClassName).append(d.dpDiv).bind("setData.datepicker",function(i,j,n){d.settings[j]=n}).bind("getData.datepicker",function(i,j){return this._get(d,j)});b.data(a,"datepicker",d);this._setDate(d,this._getDefaultDate(d),\ntrue);this._updateDatepicker(d);this._updateAlternate(d);d.dpDiv.show()}},_dialogDatepicker:function(a,d,h,i,j){a=this._dialogInst;if(!a){this.uuid+=1;this._dialogInput=b(\'<input type="text" id="\'+("dp"+this.uuid)+\'" style="position: absolute; top: -100px; width: 0px; z-index: -10;"/>\');this._dialogInput.keydown(this._doKeyDown);b("body").append(this._dialogInput);a=this._dialogInst=this._newInst(this._dialogInput,false);a.settings={};b.data(this._dialogInput[0],"datepicker",a)}g(a.settings,i||{});\nd=d&&d.constructor==Date?this._formatDate(a,d):d;this._dialogInput.val(d);this._pos=j?j.length?j:[j.pageX,j.pageY]:null;if(!this._pos)this._pos=[document.documentElement.clientWidth/2-100+(document.documentElement.scrollLeft||document.body.scrollLeft),document.documentElement.clientHeight/2-150+(document.documentElement.scrollTop||document.body.scrollTop)];this._dialogInput.css("left",this._pos[0]+20+"px").css("top",this._pos[1]+"px");a.settings.onSelect=h;this._inDialog=true;this.dpDiv.addClass(this._dialogClass);\nthis._showDatepicker(this._dialogInput[0]);b.blockUI&&b.blockUI(this.dpDiv);b.data(this._dialogInput[0],"datepicker",a);return this},_destroyDatepicker:function(a){var d=b(a),h=b.data(a,"datepicker");if(d.hasClass(this.markerClassName)){var i=a.nodeName.toLowerCase();b.removeData(a,"datepicker");if(i=="input"){h.append.remove();h.trigger.remove();d.removeClass(this.markerClassName).unbind("focus",this._showDatepicker).unbind("keydown",this._doKeyDown).unbind("keypress",this._doKeyPress).unbind("keyup",\nthis._doKeyUp)}else if(i=="div"||i=="span")d.removeClass(this.markerClassName).empty()}},_enableDatepicker:function(a){var d=b(a),h=b.data(a,"datepicker");if(d.hasClass(this.markerClassName)){var i=a.nodeName.toLowerCase();if(i=="input"){a.disabled=false;h.trigger.filter("button").each(function(){this.disabled=false}).end().filter("img").css({opacity:"1.0",cursor:""})}else 
if(i=="div"||i=="span")d.children("."+this._inlineClass).children().removeClass("ui-state-disabled");this._disabledInputs=b.map(this._disabledInputs,\nfunction(j){return j==a?null:j})}},_disableDatepicker:function(a){var d=b(a),h=b.data(a,"datepicker");if(d.hasClass(this.markerClassName)){var i=a.nodeName.toLowerCase();if(i=="input"){a.disabled=true;h.trigger.filter("button").each(function(){this.disabled=true}).end().filter("img").css({opacity:"0.5",cursor:"default"})}else if(i=="div"||i=="span")d.children("."+this._inlineClass).children().addClass("ui-state-disabled");this._disabledInputs=b.map(this._disabledInputs,function(j){return j==a?null:\nj});this._disabledInputs[this._disabledInputs.length]=a}},_isDisabledDatepicker:function(a){if(!a)return false;for(var d=0;d<this._disabledInputs.length;d++)if(this._disabledInputs[d]==a)return true;return false},_getInst:function(a){try{return b.data(a,"datepicker")}catch(d){throw"Missing instance data for this datepicker";}},_optionDatepicker:function(a,d,h){var i=this._getInst(a);if(arguments.length==2&&typeof d=="string")return d=="defaults"?b.extend({},b.datepicker._defaults):i?d=="all"?b.extend({},\ni.settings):this._get(i,d):null;var j=d||{};if(typeof d=="string"){j={};j[d]=h}if(i){this._curInst==i&&this._hideDatepicker();var n=this._getDateDatepicker(a,true);g(i.settings,j);this._attachments(b(a),i);this._autoSize(i);this._setDateDatepicker(a,n);this._updateDatepicker(i)}},_changeDatepicker:function(a,d,h){this._optionDatepicker(a,d,h)},_refreshDatepicker:function(a){(a=this._getInst(a))&&this._updateDatepicker(a)},_setDateDatepicker:function(a,d){if(a=this._getInst(a)){this._setDate(a,d);\nthis._updateDatepicker(a);this._updateAlternate(a)}},_getDateDatepicker:function(a,d){(a=this._getInst(a))&&!a.inline&&this._setDateFromField(a,d);return a?this._getDate(a):null},_doKeyDown:function(a){var d=b.datepicker._getInst(a.target),h=true,i=d.dpDiv.is(".ui-datepicker-rtl");d._keyEvent=true;if(b.datepicker._datepickerShowing)switch(a.keyCode){case 9:b.datepicker._hideDatepicker();h=false;break;case 13:h=b("td."+b.datepicker._dayOverClass+":not(."+b.datepicker._currentClass+")",d.dpDiv);h[0]?\nb.datepicker._selectDay(a.target,d.selectedMonth,d.selectedYear,h[0]):b.datepicker._hideDatepicker();return false;case 27:b.datepicker._hideDatepicker();break;case 33:b.datepicker._adjustDate(a.target,a.ctrlKey?-b.datepicker._get(d,"stepBigMonths"):-b.datepicker._get(d,"stepMonths"),"M");break;case 34:b.datepicker._adjustDate(a.target,a.ctrlKey?+b.datepicker._get(d,"stepBigMonths"):+b.datepicker._get(d,"stepMonths"),"M");break;case 35:if(a.ctrlKey||a.metaKey)b.datepicker._clearDate(a.target);h=a.ctrlKey||\na.metaKey;break;case 36:if(a.ctrlKey||a.metaKey)b.datepicker._gotoToday(a.target);h=a.ctrlKey||a.metaKey;break;case 37:if(a.ctrlKey||a.metaKey)b.datepicker._adjustDate(a.target,i?+1:-1,"D");h=a.ctrlKey||a.metaKey;if(a.originalEvent.altKey)b.datepicker._adjustDate(a.target,a.ctrlKey?-b.datepicker._get(d,"stepBigMonths"):-b.datepicker._get(d,"stepMonths"),"M");break;case 38:if(a.ctrlKey||a.metaKey)b.datepicker._adjustDate(a.target,-7,"D");h=a.ctrlKey||a.metaKey;break;case 39:if(a.ctrlKey||a.metaKey)b.datepicker._adjustDate(a.target,\ni?-1:+1,"D");h=a.ctrlKey||a.metaKey;if(a.originalEvent.altKey)b.datepicker._adjustDate(a.target,a.ctrlKey?+b.datepicker._get(d,"stepBigMonths"):+b.datepicker._get(d,"stepMonths"),"M");break;case 40:if(a.ctrlKey||a.metaKey)b.datepicker._adjustDate(a.target,+7,"D");h=a.ctrlKey||a.metaKey;break;default:h=false}else 
if(a.keyCode==36&&a.ctrlKey)b.datepicker._showDatepicker(this);else h=false;if(h){a.preventDefault();a.stopPropagation()}},_doKeyPress:function(a){var d=b.datepicker._getInst(a.target);if(b.datepicker._get(d,\n"constrainInput")){d=b.datepicker._possibleChars(b.datepicker._get(d,"dateFormat"));var h=String.fromCharCode(a.charCode==c?a.keyCode:a.charCode);return a.ctrlKey||a.metaKey||h<" "||!d||d.indexOf(h)>-1}},_doKeyUp:function(a){a=b.datepicker._getInst(a.target);if(a.input.val()!=a.lastVal)try{if(b.datepicker.parseDate(b.datepicker._get(a,"dateFormat"),a.input?a.input.val():null,b.datepicker._getFormatConfig(a))){b.datepicker._setDateFromField(a);b.datepicker._updateAlternate(a);b.datepicker._updateDatepicker(a)}}catch(d){b.datepicker.log(d)}return true},\n_showDatepicker:function(a){a=a.target||a;if(a.nodeName.toLowerCase()!="input")a=b("input",a.parentNode)[0];if(!(b.datepicker._isDisabledDatepicker(a)||b.datepicker._lastInput==a)){var d=b.datepicker._getInst(a);b.datepicker._curInst&&b.datepicker._curInst!=d&&b.datepicker._curInst.dpDiv.stop(true,true);var h=b.datepicker._get(d,"beforeShow");g(d.settings,h?h.apply(a,[a,d]):{});d.lastVal=null;b.datepicker._lastInput=a;b.datepicker._setDateFromField(d);if(b.datepicker._inDialog)a.value="";if(!b.datepicker._pos){b.datepicker._pos=\nb.datepicker._findPos(a);b.datepicker._pos[1]+=a.offsetHeight}var i=false;b(a).parents().each(function(){i|=b(this).css("position")=="fixed";return!i});if(i&&b.browser.opera){b.datepicker._pos[0]-=document.documentElement.scrollLeft;b.datepicker._pos[1]-=document.documentElement.scrollTop}h={left:b.datepicker._pos[0],top:b.datepicker._pos[1]};b.datepicker._pos=null;d.dpDiv.empty();d.dpDiv.css({position:"absolute",display:"block",top:"-1000px"});b.datepicker._updateDatepicker(d);h=b.datepicker._checkOffset(d,\nh,i);d.dpDiv.css({position:b.datepicker._inDialog&&b.blockUI?"static":i?"fixed":"absolute",display:"none",left:h.left+"px",top:h.top+"px"});if(!d.inline){h=b.datepicker._get(d,"showAnim");var j=b.datepicker._get(d,"duration"),n=function(){b.datepicker._datepickerShowing=true;var q=d.dpDiv.find("iframe.ui-datepicker-cover");if(q.length){var l=b.datepicker._getBorders(d.dpDiv);q.css({left:-l[0],top:-l[1],width:d.dpDiv.outerWidth(),height:d.dpDiv.outerHeight()})}};d.dpDiv.zIndex(b(a).zIndex()+1);b.effects&&\nb.effects[h]?d.dpDiv.show(h,b.datepicker._get(d,"showOptions"),j,n):d.dpDiv[h||"show"](h?j:null,n);if(!h||!j)n();d.input.is(":visible")&&!d.input.is(":disabled")&&d.input.focus();b.datepicker._curInst=d}}},_updateDatepicker:function(a){var d=this,h=b.datepicker._getBorders(a.dpDiv);a.dpDiv.empty().append(this._generateHTML(a));var i=a.dpDiv.find("iframe.ui-datepicker-cover");i.length&&i.css({left:-h[0],top:-h[1],width:a.dpDiv.outerWidth(),height:a.dpDiv.outerHeight()});a.dpDiv.find("button, .ui-datepicker-prev, .ui-datepicker-next, .ui-datepicker-calendar td 
a").bind("mouseout",\nfunction(){b(this).removeClass("ui-state-hover");this.className.indexOf("ui-datepicker-prev")!=-1&&b(this).removeClass("ui-datepicker-prev-hover");this.className.indexOf("ui-datepicker-next")!=-1&&b(this).removeClass("ui-datepicker-next-hover")}).bind("mouseover",function(){if(!d._isDisabledDatepicker(a.inline?a.dpDiv.parent()[0]:a.input[0])){b(this).parents(".ui-datepicker-calendar").find("a").removeClass("ui-state-hover");b(this).addClass("ui-state-hover");this.className.indexOf("ui-datepicker-prev")!=\n-1&&b(this).addClass("ui-datepicker-prev-hover");this.className.indexOf("ui-datepicker-next")!=-1&&b(this).addClass("ui-datepicker-next-hover")}}).end().find("."+this._dayOverClass+" a").trigger("mouseover").end();h=this._getNumberOfMonths(a);i=h[1];i>1?a.dpDiv.addClass("ui-datepicker-multi-"+i).css("width",17*i+"em"):a.dpDiv.removeClass("ui-datepicker-multi-2 ui-datepicker-multi-3 ui-datepicker-multi-4").width("");a.dpDiv[(h[0]!=1||h[1]!=1?"add":"remove")+"Class"]("ui-datepicker-multi");a.dpDiv[(this._get(a,\n"isRTL")?"add":"remove")+"Class"]("ui-datepicker-rtl");a==b.datepicker._curInst&&b.datepicker._datepickerShowing&&a.input&&a.input.is(":visible")&&!a.input.is(":disabled")&&a.input.focus();if(a.yearshtml){var j=a.yearshtml;setTimeout(function(){j===a.yearshtml&&a.dpDiv.find("select.ui-datepicker-year:first").replaceWith(a.yearshtml);j=a.yearshtml=null},0)}},_getBorders:function(a){var d=function(h){return{thin:1,medium:2,thick:3}[h]||h};return[parseFloat(d(a.css("border-left-width"))),parseFloat(d(a.css("border-top-width")))]},\n_checkOffset:function(a,d,h){var i=a.dpDiv.outerWidth(),j=a.dpDiv.outerHeight(),n=a.input?a.input.outerWidth():0,q=a.input?a.input.outerHeight():0,l=document.documentElement.clientWidth+b(document).scrollLeft(),k=document.documentElement.clientHeight+b(document).scrollTop();d.left-=this._get(a,"isRTL")?i-n:0;d.left-=h&&d.left==a.input.offset().left?b(document).scrollLeft():0;d.top-=h&&d.top==a.input.offset().top+q?b(document).scrollTop():0;d.left-=Math.min(d.left,d.left+i>l&&l>i?Math.abs(d.left+i-\nl):0);d.top-=Math.min(d.top,d.top+j>k&&k>j?Math.abs(j+q):0);return d},_findPos:function(a){for(var d=this._get(this._getInst(a),"isRTL");a&&(a.type=="hidden"||a.nodeType!=1);)a=a[d?"previousSibling":"nextSibling"];a=b(a).offset();return[a.left,a.top]},_hideDatepicker:function(a){var d=this._curInst;if(!(!d||a&&d!=b.data(a,"datepicker")))if(this._datepickerShowing){a=this._get(d,"showAnim");var h=this._get(d,"duration"),i=function(){b.datepicker._tidyDialog(d);this._curInst=null};b.effects&&b.effects[a]?\nd.dpDiv.hide(a,b.datepicker._get(d,"showOptions"),h,i):d.dpDiv[a=="slideDown"?"slideUp":a=="fadeIn"?"fadeOut":"hide"](a?h:null,i);a||i();if(a=this._get(d,"onClose"))a.apply(d.input?d.input[0]:null,[d.input?d.input.val():"",d]);this._datepickerShowing=false;this._lastInput=null;if(this._inDialog){this._dialogInput.css({position:"absolute",left:"0",top:"-100px"});if(b.blockUI){b.unblockUI();b("body").append(this.dpDiv)}}this._inDialog=false}},_tidyDialog:function(a){a.dpDiv.removeClass(this._dialogClass).unbind(".ui-datepicker-calendar")},\n_checkExternalClick:function(a){if(b.datepicker._curInst){a=b(a.target);a[0].id!=b.datepicker._mainDivId&&a.parents("#"+b.datepicker._mainDivId).length==0&&!a.hasClass(b.datepicker.markerClassName)&&!a.hasClass(b.datepicker._triggerClass)&&b.datepicker._datepickerShowing&&!(b.datepicker._inDialog&&b.blockUI)&&b.datepicker._hideDatepicker()}},_adjustDate:function(a,d,h){a=b(a);var 
i=this._getInst(a[0]);if(!this._isDisabledDatepicker(a[0])){this._adjustInstDate(i,d+(h=="M"?this._get(i,"showCurrentAtPos"):\n0),h);this._updateDatepicker(i)}},_gotoToday:function(a){a=b(a);var d=this._getInst(a[0]);if(this._get(d,"gotoCurrent")&&d.currentDay){d.selectedDay=d.currentDay;d.drawMonth=d.selectedMonth=d.currentMonth;d.drawYear=d.selectedYear=d.currentYear}else{var h=new Date;d.selectedDay=h.getDate();d.drawMonth=d.selectedMonth=h.getMonth();d.drawYear=d.selectedYear=h.getFullYear()}this._notifyChange(d);this._adjustDate(a)},_selectMonthYear:function(a,d,h){a=b(a);var i=this._getInst(a[0]);i._selectingMonthYear=\nfalse;i["selected"+(h=="M"?"Month":"Year")]=i["draw"+(h=="M"?"Month":"Year")]=parseInt(d.options[d.selectedIndex].value,10);this._notifyChange(i);this._adjustDate(a)},_clickMonthYear:function(a){var d=this._getInst(b(a)[0]);d.input&&d._selectingMonthYear&&setTimeout(function(){d.input.focus()},0);d._selectingMonthYear=!d._selectingMonthYear},_selectDay:function(a,d,h,i){var j=b(a);if(!(b(i).hasClass(this._unselectableClass)||this._isDisabledDatepicker(j[0]))){j=this._getInst(j[0]);j.selectedDay=j.currentDay=\nb("a",i).html();j.selectedMonth=j.currentMonth=d;j.selectedYear=j.currentYear=h;this._selectDate(a,this._formatDate(j,j.currentDay,j.currentMonth,j.currentYear))}},_clearDate:function(a){a=b(a);this._getInst(a[0]);this._selectDate(a,"")},_selectDate:function(a,d){a=this._getInst(b(a)[0]);d=d!=null?d:this._formatDate(a);a.input&&a.input.val(d);this._updateAlternate(a);var h=this._get(a,"onSelect");if(h)h.apply(a.input?a.input[0]:null,[d,a]);else a.input&&a.input.trigger("change");if(a.inline)this._updateDatepicker(a);\nelse{this._hideDatepicker();this._lastInput=a.input[0];typeof a.input[0]!="object"&&a.input.focus();this._lastInput=null}},_updateAlternate:function(a){var d=this._get(a,"altField");if(d){var h=this._get(a,"altFormat")||this._get(a,"dateFormat"),i=this._getDate(a),j=this.formatDate(h,i,this._getFormatConfig(a));b(d).each(function(){b(this).val(j)})}},noWeekends:function(a){a=a.getDay();return[a>0&&a<6,""]},iso8601Week:function(a){a=new Date(a.getTime());a.setDate(a.getDate()+4-(a.getDay()||7));var d=\na.getTime();a.setMonth(0);a.setDate(1);return Math.floor(Math.round((d-a)/864E5)/7)+1},parseDate:function(a,d,h){if(a==null||d==null)throw"Invalid arguments";d=typeof d=="object"?d.toString():d+"";if(d=="")return null;var i=(h?h.shortYearCutoff:null)||this._defaults.shortYearCutoff;i=typeof i!="string"?i:(new Date).getFullYear()%100+parseInt(i,10);for(var j=(h?h.dayNamesShort:null)||this._defaults.dayNamesShort,n=(h?h.dayNames:null)||this._defaults.dayNames,q=(h?h.monthNamesShort:null)||this._defaults.monthNamesShort,\nl=(h?h.monthNames:null)||this._defaults.monthNames,k=h=-1,m=-1,o=-1,p=false,s=function(x){(x=y+1<a.length&&a.charAt(y+1)==x)&&y++;return x},r=function(x){var C=s(x);x=new RegExp("^\\\\d{1,"+(x=="@"?14:x=="!"?20:x=="y"&&C?4:x=="o"?3:2)+"}");x=d.substring(w).match(x);if(!x)throw"Missing number at position "+w;w+=x[0].length;return parseInt(x[0],10)},u=function(x,C,J){x=s(x)?J:C;for(C=0;C<x.length;C++)if(d.substr(w,x[C].length).toLowerCase()==x[C].toLowerCase()){w+=x[C].length;return C+1}throw"Unknown name at position "+\nw;},v=function(){if(d.charAt(w)!=a.charAt(y))throw"Unexpected literal at position "+w;w++},w=0,y=0;y<a.length;y++)if(p)if(a.charAt(y)=="\'"&&!s("\'"))p=false;else v();else switch(a.charAt(y)){case "d":m=r("d");break;case "D":u("D",j,n);break;case "o":o=r("o");break;case "m":k=r("m");break;case "M":k=u("M",q,l);break;case 
"y":h=r("y");break;case "@":var B=new Date(r("@"));h=B.getFullYear();k=B.getMonth()+1;m=B.getDate();break;case "!":B=new Date((r("!")-this._ticksTo1970)/1E4);h=B.getFullYear();k=B.getMonth()+\n1;m=B.getDate();break;case "\'":if(s("\'"))v();else p=true;break;default:v()}if(h==-1)h=(new Date).getFullYear();else if(h<100)h+=(new Date).getFullYear()-(new Date).getFullYear()%100+(h<=i?0:-100);if(o>-1){k=1;m=o;do{i=this._getDaysInMonth(h,k-1);if(m<=i)break;k++;m-=i}while(1)}B=this._daylightSavingAdjust(new Date(h,k-1,m));if(B.getFullYear()!=h||B.getMonth()+1!=k||B.getDate()!=m)throw"Invalid date";return B},ATOM:"yy-mm-dd",COOKIE:"D, dd M yy",ISO_8601:"yy-mm-dd",RFC_822:"D, d M y",RFC_850:"DD, dd-M-y",\nRFC_1036:"D, d M y",RFC_1123:"D, d M yy",RFC_2822:"D, d M yy",RSS:"D, d M y",TICKS:"!",TIMESTAMP:"@",W3C:"yy-mm-dd",_ticksTo1970:(718685+Math.floor(492.5)-Math.floor(19.7)+Math.floor(4.925))*24*60*60*1E7,formatDate:function(a,d,h){if(!d)return"";var i=(h?h.dayNamesShort:null)||this._defaults.dayNamesShort,j=(h?h.dayNames:null)||this._defaults.dayNames,n=(h?h.monthNamesShort:null)||this._defaults.monthNamesShort;h=(h?h.monthNames:null)||this._defaults.monthNames;var q=function(s){(s=p+1<a.length&&\na.charAt(p+1)==s)&&p++;return s},l=function(s,r,u){r=""+r;if(q(s))for(;r.length<u;)r="0"+r;return r},k=function(s,r,u,v){return q(s)?v[r]:u[r]},m="",o=false;if(d)for(var p=0;p<a.length;p++)if(o)if(a.charAt(p)=="\'"&&!q("\'"))o=false;else m+=a.charAt(p);else switch(a.charAt(p)){case "d":m+=l("d",d.getDate(),2);break;case "D":m+=k("D",d.getDay(),i,j);break;case "o":m+=l("o",(d.getTime()-(new Date(d.getFullYear(),0,0)).getTime())/864E5,3);break;case "m":m+=l("m",d.getMonth()+1,2);break;case "M":m+=k("M",\nd.getMonth(),n,h);break;case "y":m+=q("y")?d.getFullYear():(d.getYear()%100<10?"0":"")+d.getYear()%100;break;case "@":m+=d.getTime();break;case "!":m+=d.getTime()*1E4+this._ticksTo1970;break;case "\'":if(q("\'"))m+="\'";else o=true;break;default:m+=a.charAt(p)}return m},_possibleChars:function(a){for(var d="",h=false,i=function(n){(n=j+1<a.length&&a.charAt(j+1)==n)&&j++;return n},j=0;j<a.length;j++)if(h)if(a.charAt(j)=="\'"&&!i("\'"))h=false;else d+=a.charAt(j);else switch(a.charAt(j)){case "d":case "m":case "y":case "@":d+=\n"0123456789";break;case "D":case "M":return null;case "\'":if(i("\'"))d+="\'";else h=true;break;default:d+=a.charAt(j)}return d},_get:function(a,d){return a.settings[d]!==c?a.settings[d]:this._defaults[d]},_setDateFromField:function(a,d){if(a.input.val()!=a.lastVal){var h=this._get(a,"dateFormat"),i=a.lastVal=a.input?a.input.val():null,j,n;j=n=this._getDefaultDate(a);var q=this._getFormatConfig(a);try{j=this.parseDate(h,i,q)||n}catch(l){this.log(l);i=d?"":i}a.selectedDay=j.getDate();a.drawMonth=a.selectedMonth=\nj.getMonth();a.drawYear=a.selectedYear=j.getFullYear();a.currentDay=i?j.getDate():0;a.currentMonth=i?j.getMonth():0;a.currentYear=i?j.getFullYear():0;this._adjustInstDate(a)}},_getDefaultDate:function(a){return this._restrictMinMax(a,this._determineDate(a,this._get(a,"defaultDate"),new Date))},_determineDate:function(a,d,h){var i=function(n){var q=new Date;q.setDate(q.getDate()+n);return q},j=function(n){try{return b.datepicker.parseDate(b.datepicker._get(a,"dateFormat"),n,b.datepicker._getFormatConfig(a))}catch(q){}var l=\n(n.toLowerCase().match(/^c/)?b.datepicker._getDate(a):null)||new Date,k=l.getFullYear(),m=l.getMonth();l=l.getDate();for(var o=/([+-]?[0-9]+)\\s*(d|D|w|W|m|M|y|Y)?/g,p=o.exec(n);p;){switch(p[2]||"d"){case "d":case "D":l+=parseInt(p[1],10);break;case 
"w":case "W":l+=parseInt(p[1],10)*7;break;case "m":case "M":m+=parseInt(p[1],10);l=Math.min(l,b.datepicker._getDaysInMonth(k,m));break;case "y":case "Y":k+=parseInt(p[1],10);l=Math.min(l,b.datepicker._getDaysInMonth(k,m));break}p=o.exec(n)}return new Date(k,\nm,l)};if(d=(d=d==null||d===""?h:typeof d=="string"?j(d):typeof d=="number"?isNaN(d)?h:i(d):new Date(d.getTime()))&&d.toString()=="Invalid Date"?h:d){d.setHours(0);d.setMinutes(0);d.setSeconds(0);d.setMilliseconds(0)}return this._daylightSavingAdjust(d)},_daylightSavingAdjust:function(a){if(!a)return null;a.setHours(a.getHours()>12?a.getHours()+2:0);return a},_setDate:function(a,d,h){var i=!d,j=a.selectedMonth,n=a.selectedYear;d=this._restrictMinMax(a,this._determineDate(a,d,new Date));a.selectedDay=\na.currentDay=d.getDate();a.drawMonth=a.selectedMonth=a.currentMonth=d.getMonth();a.drawYear=a.selectedYear=a.currentYear=d.getFullYear();if((j!=a.selectedMonth||n!=a.selectedYear)&&!h)this._notifyChange(a);this._adjustInstDate(a);if(a.input)a.input.val(i?"":this._formatDate(a))},_getDate:function(a){return!a.currentYear||a.input&&a.input.val()==""?null:this._daylightSavingAdjust(new Date(a.currentYear,a.currentMonth,a.currentDay))},_generateHTML:function(a){var d=new Date;d=this._daylightSavingAdjust(new Date(d.getFullYear(),\nd.getMonth(),d.getDate()));var h=this._get(a,"isRTL"),i=this._get(a,"showButtonPanel"),j=this._get(a,"hideIfNoPrevNext"),n=this._get(a,"navigationAsDateFormat"),q=this._getNumberOfMonths(a),l=this._get(a,"showCurrentAtPos"),k=this._get(a,"stepMonths"),m=q[0]!=1||q[1]!=1,o=this._daylightSavingAdjust(!a.currentDay?new Date(9999,9,9):new Date(a.currentYear,a.currentMonth,a.currentDay)),p=this._getMinMaxDate(a,"min"),s=this._getMinMaxDate(a,"max");l=a.drawMonth-l;var r=a.drawYear;if(l<0){l+=12;r--}if(s){var u=\nthis._daylightSavingAdjust(new Date(s.getFullYear(),s.getMonth()-q[0]*q[1]+1,s.getDate()));for(u=p&&u<p?p:u;this._daylightSavingAdjust(new Date(r,l,1))>u;){l--;if(l<0){l=11;r--}}}a.drawMonth=l;a.drawYear=r;u=this._get(a,"prevText");u=!n?u:this.formatDate(u,this._daylightSavingAdjust(new Date(r,l-k,1)),this._getFormatConfig(a));u=this._canAdjustMonth(a,-1,r,l)?\'<a class="ui-datepicker-prev ui-corner-all" onclick="DP_jQuery_\'+e+".datepicker._adjustDate(\'#"+a.id+"\', -"+k+", \'M\');\\" title=\\""+u+\'"><span class="ui-icon ui-icon-circle-triangle-\'+\n(h?"e":"w")+\'">\'+u+"</span></a>":j?"":\'<a class="ui-datepicker-prev ui-corner-all ui-state-disabled" title="\'+u+\'"><span class="ui-icon ui-icon-circle-triangle-\'+(h?"e":"w")+\'">\'+u+"</span></a>";var v=this._get(a,"nextText");v=!n?v:this.formatDate(v,this._daylightSavingAdjust(new Date(r,l+k,1)),this._getFormatConfig(a));j=this._canAdjustMonth(a,+1,r,l)?\'<a class="ui-datepicker-next ui-corner-all" onclick="DP_jQuery_\'+e+".datepicker._adjustDate(\'#"+a.id+"\', +"+k+", \'M\');\\" title=\\""+v+\'"><span class="ui-icon ui-icon-circle-triangle-\'+\n(h?"w":"e")+\'">\'+v+"</span></a>":j?"":\'<a class="ui-datepicker-next ui-corner-all ui-state-disabled" title="\'+v+\'"><span class="ui-icon ui-icon-circle-triangle-\'+(h?"w":"e")+\'">\'+v+"</span></a>";k=this._get(a,"currentText");v=this._get(a,"gotoCurrent")&&a.currentDay?o:d;k=!n?k:this.formatDate(k,v,this._getFormatConfig(a));n=!a.inline?\'<button type="button" class="ui-datepicker-close ui-state-default ui-priority-primary ui-corner-all" onclick="DP_jQuery_\'+e+\'.datepicker._hideDatepicker();">\'+this._get(a,\n"closeText")+"</button>":"";i=i?\'<div class="ui-datepicker-buttonpane 
ui-widget-content">\'+(h?n:"")+(this._isInRange(a,v)?\'<button type="button" class="ui-datepicker-current ui-state-default ui-priority-secondary ui-corner-all" onclick="DP_jQuery_\'+e+".datepicker._gotoToday(\'#"+a.id+"\');\\">"+k+"</button>":"")+(h?"":n)+"</div>":"";n=parseInt(this._get(a,"firstDay"),10);n=isNaN(n)?0:n;k=this._get(a,"showWeek");v=this._get(a,"dayNames");this._get(a,"dayNamesShort");var w=this._get(a,"dayNamesMin"),y=\nthis._get(a,"monthNames"),B=this._get(a,"monthNamesShort"),x=this._get(a,"beforeShowDay"),C=this._get(a,"showOtherMonths"),J=this._get(a,"selectOtherMonths");this._get(a,"calculateWeek");for(var M=this._getDefaultDate(a),K="",G=0;G<q[0];G++){for(var N="",H=0;H<q[1];H++){var O=this._daylightSavingAdjust(new Date(r,l,a.selectedDay)),A=" ui-corner-all",D="";if(m){D+=\'<div class="ui-datepicker-group\';if(q[1]>1)switch(H){case 0:D+=" ui-datepicker-group-first";A=" ui-corner-"+(h?"right":"left");break;case q[1]-\n1:D+=" ui-datepicker-group-last";A=" ui-corner-"+(h?"left":"right");break;default:D+=" ui-datepicker-group-middle";A="";break}D+=\'">\'}D+=\'<div class="ui-datepicker-header ui-widget-header ui-helper-clearfix\'+A+\'">\'+(/all|left/.test(A)&&G==0?h?j:u:"")+(/all|right/.test(A)&&G==0?h?u:j:"")+this._generateMonthYearHeader(a,l,r,p,s,G>0||H>0,y,B)+\'</div><table class="ui-datepicker-calendar"><thead><tr>\';var E=k?\'<th class="ui-datepicker-week-col">\'+this._get(a,"weekHeader")+"</th>":"";for(A=0;A<7;A++){var z=\n(A+n)%7;E+="<th"+((A+n+6)%7>=5?\' class="ui-datepicker-week-end"\':"")+\'><span title="\'+v[z]+\'">\'+w[z]+"</span></th>"}D+=E+"</tr></thead><tbody>";E=this._getDaysInMonth(r,l);if(r==a.selectedYear&&l==a.selectedMonth)a.selectedDay=Math.min(a.selectedDay,E);A=(this._getFirstDayOfMonth(r,l)-n+7)%7;E=m?6:Math.ceil((A+E)/7);z=this._daylightSavingAdjust(new Date(r,l,1-A));for(var P=0;P<E;P++){D+="<tr>";var Q=!k?"":\'<td class="ui-datepicker-week-col">\'+this._get(a,"calculateWeek")(z)+"</td>";for(A=0;A<7;A++){var I=\nx?x.apply(a.input?a.input[0]:null,[z]):[true,""],F=z.getMonth()!=l,L=F&&!J||!I[0]||p&&z<p||s&&z>s;Q+=\'<td class="\'+((A+n+6)%7>=5?" ui-datepicker-week-end":"")+(F?" ui-datepicker-other-month":"")+(z.getTime()==O.getTime()&&l==a.selectedMonth&&a._keyEvent||M.getTime()==z.getTime()&&M.getTime()==O.getTime()?" "+this._dayOverClass:"")+(L?" "+this._unselectableClass+" ui-state-disabled":"")+(F&&!C?"":" "+I[1]+(z.getTime()==o.getTime()?" "+this._currentClass:"")+(z.getTime()==d.getTime()?" ui-datepicker-today":\n""))+\'"\'+((!F||C)&&I[2]?\' title="\'+I[2]+\'"\':"")+(L?"":\' onclick="DP_jQuery_\'+e+".datepicker._selectDay(\'#"+a.id+"\',"+z.getMonth()+","+z.getFullYear()+\', this);return false;"\')+">"+(F&&!C?" ":L?\'<span class="ui-state-default">\'+z.getDate()+"</span>":\'<a class="ui-state-default\'+(z.getTime()==d.getTime()?" ui-state-highlight":"")+(z.getTime()==o.getTime()?" ui-state-active":"")+(F?" 
ui-priority-secondary":"")+\'" href="#">\'+z.getDate()+"</a>")+"</td>";z.setDate(z.getDate()+1);z=this._daylightSavingAdjust(z)}D+=\nQ+"</tr>"}l++;if(l>11){l=0;r++}D+="</tbody></table>"+(m?"</div>"+(q[0]>0&&H==q[1]-1?\'<div class="ui-datepicker-row-break"></div>\':""):"");N+=D}K+=N}K+=i+(b.browser.msie&&parseInt(b.browser.version,10)<7&&!a.inline?\'<iframe src="javascript:false;" class="ui-datepicker-cover" frameborder="0"></iframe>\':"");a._keyEvent=false;return K},_generateMonthYearHeader:function(a,d,h,i,j,n,q,l){var k=this._get(a,"changeMonth"),m=this._get(a,"changeYear"),o=this._get(a,"showMonthAfterYear"),p=\'<div class="ui-datepicker-title">\',\ns="";if(n||!k)s+=\'<span class="ui-datepicker-month">\'+q[d]+"</span>";else{q=i&&i.getFullYear()==h;var r=j&&j.getFullYear()==h;s+=\'<select class="ui-datepicker-month" onchange="DP_jQuery_\'+e+".datepicker._selectMonthYear(\'#"+a.id+"\', this, \'M\');\\" onclick=\\"DP_jQuery_"+e+".datepicker._clickMonthYear(\'#"+a.id+"\');\\">";for(var u=0;u<12;u++)if((!q||u>=i.getMonth())&&(!r||u<=j.getMonth()))s+=\'<option value="\'+u+\'"\'+(u==d?\' selected="selected"\':"")+">"+l[u]+"</option>";s+="</select>"}o||(p+=s+(n||!(k&&\nm)?" ":""));a.yearshtml="";if(n||!m)p+=\'<span class="ui-datepicker-year">\'+h+"</span>";else{l=this._get(a,"yearRange").split(":");var v=(new Date).getFullYear();q=function(w){w=w.match(/c[+-].*/)?h+parseInt(w.substring(1),10):w.match(/[+-].*/)?v+parseInt(w,10):parseInt(w,10);return isNaN(w)?v:w};d=q(l[0]);l=Math.max(d,q(l[1]||""));d=i?Math.max(d,i.getFullYear()):d;l=j?Math.min(l,j.getFullYear()):l;for(a.yearshtml+=\'<select class="ui-datepicker-year" onchange="DP_jQuery_\'+e+".datepicker._selectMonthYear(\'#"+\na.id+"\', this, \'Y\');\\" onclick=\\"DP_jQuery_"+e+".datepicker._clickMonthYear(\'#"+a.id+"\');\\">";d<=l;d++)a.yearshtml+=\'<option value="\'+d+\'"\'+(d==h?\' selected="selected"\':"")+">"+d+"</option>";a.yearshtml+="</select>";if(b.browser.mozilla)p+=\'<select class="ui-datepicker-year"><option value="\'+h+\'" selected="selected">\'+h+"</option></select>";else{p+=a.yearshtml;a.yearshtml=null}}p+=this._get(a,"yearSuffix");if(o)p+=(n||!(k&&m)?" 
":"")+s;p+="</div>";return p},_adjustInstDate:function(a,d,h){var i=\na.drawYear+(h=="Y"?d:0),j=a.drawMonth+(h=="M"?d:0);d=Math.min(a.selectedDay,this._getDaysInMonth(i,j))+(h=="D"?d:0);i=this._restrictMinMax(a,this._daylightSavingAdjust(new Date(i,j,d)));a.selectedDay=i.getDate();a.drawMonth=a.selectedMonth=i.getMonth();a.drawYear=a.selectedYear=i.getFullYear();if(h=="M"||h=="Y")this._notifyChange(a)},_restrictMinMax:function(a,d){var h=this._getMinMaxDate(a,"min");a=this._getMinMaxDate(a,"max");d=h&&d<h?h:d;return d=a&&d>a?a:d},_notifyChange:function(a){var d=this._get(a,\n"onChangeMonthYear");if(d)d.apply(a.input?a.input[0]:null,[a.selectedYear,a.selectedMonth+1,a])},_getNumberOfMonths:function(a){a=this._get(a,"numberOfMonths");return a==null?[1,1]:typeof a=="number"?[1,a]:a},_getMinMaxDate:function(a,d){return this._determineDate(a,this._get(a,d+"Date"),null)},_getDaysInMonth:function(a,d){return 32-(new Date(a,d,32)).getDate()},_getFirstDayOfMonth:function(a,d){return(new Date(a,d,1)).getDay()},_canAdjustMonth:function(a,d,h,i){var j=this._getNumberOfMonths(a);\nh=this._daylightSavingAdjust(new Date(h,i+(d<0?d:j[0]*j[1]),1));d<0&&h.setDate(this._getDaysInMonth(h.getFullYear(),h.getMonth()));return this._isInRange(a,h)},_isInRange:function(a,d){var h=this._getMinMaxDate(a,"min");a=this._getMinMaxDate(a,"max");return(!h||d.getTime()>=h.getTime())&&(!a||d.getTime()<=a.getTime())},_getFormatConfig:function(a){var d=this._get(a,"shortYearCutoff");d=typeof d!="string"?d:(new Date).getFullYear()%100+parseInt(d,10);return{shortYearCutoff:d,dayNamesShort:this._get(a,\n"dayNamesShort"),dayNames:this._get(a,"dayNames"),monthNamesShort:this._get(a,"monthNamesShort"),monthNames:this._get(a,"monthNames")}},_formatDate:function(a,d,h,i){if(!d){a.currentDay=a.selectedDay;a.currentMonth=a.selectedMonth;a.currentYear=a.selectedYear}d=d?typeof d=="object"?d:this._daylightSavingAdjust(new Date(i,h,d)):this._daylightSavingAdjust(new Date(a.currentYear,a.currentMonth,a.currentDay));return this.formatDate(this._get(a,"dateFormat"),d,this._getFormatConfig(a))}});b.fn.datepicker=\nfunction(a){if(!b.datepicker.initialized){b(document).mousedown(b.datepicker._checkExternalClick).find("body").append(b.datepicker.dpDiv);b.datepicker.initialized=true}var d=Array.prototype.slice.call(arguments,1);if(typeof a=="string"&&(a=="isDisabled"||a=="getDate"||a=="widget"))return b.datepicker["_"+a+"Datepicker"].apply(b.datepicker,[this[0]].concat(d));if(a=="option"&&arguments.length==2&&typeof arguments[1]=="string")return b.datepicker["_"+a+"Datepicker"].apply(b.datepicker,[this[0]].concat(d));\nreturn this.each(function(){typeof a=="string"?b.datepicker["_"+a+"Datepicker"].apply(b.datepicker,[this].concat(d)):b.datepicker._attachDatepicker(this,a)})};b.datepicker=new f;b.datepicker.initialized=false;b.datepicker.uuid=(new Date).getTime();b.datepicker.version="1.8.9";window["DP_jQuery_"+e]=b})(jQuery);\n(function(b,c){var f={buttons:true,height:true,maxHeight:true,maxWidth:true,minHeight:true,minWidth:true,width:true},g={maxHeight:true,maxWidth:true,minHeight:true,minWidth:true};b.widget("ui.dialog",{options:{autoOpen:true,buttons:{},closeOnEscape:true,closeText:"close",dialogClass:"",draggable:true,hide:null,height:"auto",maxHeight:false,maxWidth:false,minHeight:150,minWidth:150,modal:false,position:{my:"center",at:"center",collision:"fit",using:function(e){var 
a=b(this).css(e).offset().top;a<0&&\nb(this).css("top",e.top-a)}},resizable:true,show:null,stack:true,title:"",width:300,zIndex:1E3},_create:function(){this.originalTitle=this.element.attr("title");if(typeof this.originalTitle!=="string")this.originalTitle="";this.options.title=this.options.title||this.originalTitle;var e=this,a=e.options,d=a.title||" ",h=b.ui.dialog.getTitleId(e.element),i=(e.uiDialog=b("<div></div>")).appendTo(document.body).hide().addClass("ui-dialog ui-widget ui-widget-content ui-corner-all "+a.dialogClass).css({zIndex:a.zIndex}).attr("tabIndex",\n-1).css("outline",0).keydown(function(q){if(a.closeOnEscape&&q.keyCode&&q.keyCode===b.ui.keyCode.ESCAPE){e.close(q);q.preventDefault()}}).attr({role:"dialog","aria-labelledby":h}).mousedown(function(q){e.moveToTop(false,q)});e.element.show().removeAttr("title").addClass("ui-dialog-content ui-widget-content").appendTo(i);var j=(e.uiDialogTitlebar=b("<div></div>")).addClass("ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix").prependTo(i),n=b(\'<a href="#"></a>\').addClass("ui-dialog-titlebar-close ui-corner-all").attr("role",\n"button").hover(function(){n.addClass("ui-state-hover")},function(){n.removeClass("ui-state-hover")}).focus(function(){n.addClass("ui-state-focus")}).blur(function(){n.removeClass("ui-state-focus")}).click(function(q){e.close(q);return false}).appendTo(j);(e.uiDialogTitlebarCloseText=b("<span></span>")).addClass("ui-icon ui-icon-closethick").text(a.closeText).appendTo(n);b("<span></span>").addClass("ui-dialog-title").attr("id",h).html(d).prependTo(j);if(b.isFunction(a.beforeclose)&&!b.isFunction(a.beforeClose))a.beforeClose=\na.beforeclose;j.find("*").add(j).disableSelection();a.draggable&&b.fn.draggable&&e._makeDraggable();a.resizable&&b.fn.resizable&&e._makeResizable();e._createButtons(a.buttons);e._isOpen=false;b.fn.bgiframe&&i.bgiframe()},_init:function(){this.options.autoOpen&&this.open()},destroy:function(){var e=this;e.overlay&&e.overlay.destroy();e.uiDialog.hide();e.element.unbind(".dialog").removeData("dialog").removeClass("ui-dialog-content ui-widget-content").hide().appendTo("body");e.uiDialog.remove();e.originalTitle&&\ne.element.attr("title",e.originalTitle);return e},widget:function(){return this.uiDialog},close:function(e){var a=this,d,h;if(false!==a._trigger("beforeClose",e)){a.overlay&&a.overlay.destroy();a.uiDialog.unbind("keypress.ui-dialog");a._isOpen=false;if(a.options.hide)a.uiDialog.hide(a.options.hide,function(){a._trigger("close",e)});else{a.uiDialog.hide();a._trigger("close",e)}b.ui.dialog.overlay.resize();if(a.options.modal){d=0;b(".ui-dialog").each(function(){if(this!==a.uiDialog[0]){h=b(this).css("z-index");\nisNaN(h)||(d=Math.max(d,h))}});b.ui.dialog.maxZ=d}return a}},isOpen:function(){return this._isOpen},moveToTop:function(e,a){var d=this,h=d.options;if(h.modal&&!e||!h.stack&&!h.modal)return d._trigger("focus",a);if(h.zIndex>b.ui.dialog.maxZ)b.ui.dialog.maxZ=h.zIndex;if(d.overlay){b.ui.dialog.maxZ+=1;d.overlay.$el.css("z-index",b.ui.dialog.overlay.maxZ=b.ui.dialog.maxZ)}e={scrollTop:d.element.attr("scrollTop"),scrollLeft:d.element.attr("scrollLeft")};b.ui.dialog.maxZ+=1;d.uiDialog.css("z-index",b.ui.dialog.maxZ);\nd.element.attr(e);d._trigger("focus",a);return d},open:function(){if(!this._isOpen){var e=this,a=e.options,d=e.uiDialog;e.overlay=a.modal?new b.ui.dialog.overlay(e):null;e._size();e._position(a.position);d.show(a.show);e.moveToTop(true);a.modal&&d.bind("keypress.ui-dialog",function(h){if(h.keyCode===b.ui.keyCode.TAB){var 
i=b(":tabbable",this),j=i.filter(":first");i=i.filter(":last");if(h.target===i[0]&&!h.shiftKey){j.focus(1);return false}else if(h.target===j[0]&&h.shiftKey){i.focus(1);return false}}});\nb(e.element.find(":tabbable").get().concat(d.find(".ui-dialog-buttonpane :tabbable").get().concat(d.get()))).eq(0).focus();e._isOpen=true;e._trigger("open");return e}},_createButtons:function(e){var a=this,d=false,h=b("<div></div>").addClass("ui-dialog-buttonpane ui-widget-content ui-helper-clearfix"),i=b("<div></div>").addClass("ui-dialog-buttonset").appendTo(h);a.uiDialog.find(".ui-dialog-buttonpane").remove();typeof e==="object"&&e!==null&&b.each(e,function(){return!(d=true)});if(d){b.each(e,function(j,\nn){n=b.isFunction(n)?{click:n,text:j}:n;j=b(\'<button type="button"></button>\').attr(n,true).unbind("click").click(function(){n.click.apply(a.element[0],arguments)}).appendTo(i);b.fn.button&&j.button()});h.appendTo(a.uiDialog)}},_makeDraggable:function(){function e(j){return{position:j.position,offset:j.offset}}var a=this,d=a.options,h=b(document),i;a.uiDialog.draggable({cancel:".ui-dialog-content, .ui-dialog-titlebar-close",handle:".ui-dialog-titlebar",containment:"document",start:function(j,n){i=\nd.height==="auto"?"auto":b(this).height();b(this).height(b(this).height()).addClass("ui-dialog-dragging");a._trigger("dragStart",j,e(n))},drag:function(j,n){a._trigger("drag",j,e(n))},stop:function(j,n){d.position=[n.position.left-h.scrollLeft(),n.position.top-h.scrollTop()];b(this).removeClass("ui-dialog-dragging").height(i);a._trigger("dragStop",j,e(n));b.ui.dialog.overlay.resize()}})},_makeResizable:function(e){function a(j){return{originalPosition:j.originalPosition,originalSize:j.originalSize,\nposition:j.position,size:j.size}}e=e===c?this.options.resizable:e;var d=this,h=d.options,i=d.uiDialog.css("position");e=typeof e==="string"?e:"n,e,s,w,se,sw,ne,nw";d.uiDialog.resizable({cancel:".ui-dialog-content",containment:"document",alsoResize:d.element,maxWidth:h.maxWidth,maxHeight:h.maxHeight,minWidth:h.minWidth,minHeight:d._minHeight(),handles:e,start:function(j,n){b(this).addClass("ui-dialog-resizing");d._trigger("resizeStart",j,a(n))},resize:function(j,n){d._trigger("resize",j,a(n))},stop:function(j,\nn){b(this).removeClass("ui-dialog-resizing");h.height=b(this).height();h.width=b(this).width();d._trigger("resizeStop",j,a(n));b.ui.dialog.overlay.resize()}}).css("position",i).find(".ui-resizable-se").addClass("ui-icon ui-icon-grip-diagonal-se")},_minHeight:function(){var e=this.options;return e.height==="auto"?e.minHeight:Math.min(e.minHeight,e.height)},_position:function(e){var a=[],d=[0,0],h;if(e){if(typeof e==="string"||typeof e==="object"&&"0"in e){a=e.split?e.split(" "):[e[0],e[1]];if(a.length===\n1)a[1]=a[0];b.each(["left","top"],function(i,j){if(+a[i]===a[i]){d[i]=a[i];a[i]=j}});e={my:a.join(" "),at:a.join(" "),offset:d.join(" ")}}e=b.extend({},b.ui.dialog.prototype.options.position,e)}else e=b.ui.dialog.prototype.options.position;(h=this.uiDialog.is(":visible"))||this.uiDialog.show();this.uiDialog.css({top:0,left:0}).position(b.extend({of:window},e));h||this.uiDialog.hide()},_setOptions:function(e){var a=this,d={},h=false;b.each(e,function(i,j){a._setOption(i,j);if(i in f)h=true;if(i in\ng)d[i]=j});h&&this._size();this.uiDialog.is(":data(resizable)")&&this.uiDialog.resizable("option",d)},_setOption:function(e,a){var d=this,h=d.uiDialog;switch(e){case "beforeclose":e="beforeClose";break;case "buttons":d._createButtons(a);break;case "closeText":d.uiDialogTitlebarCloseText.text(""+a);break;case 
"dialogClass":h.removeClass(d.options.dialogClass).addClass("ui-dialog ui-widget ui-widget-content ui-corner-all "+a);break;case "disabled":a?h.addClass("ui-dialog-disabled"):h.removeClass("ui-dialog-disabled");\nbreak;case "draggable":var i=h.is(":data(draggable)");i&&!a&&h.draggable("destroy");!i&&a&&d._makeDraggable();break;case "position":d._position(a);break;case "resizable":(i=h.is(":data(resizable)"))&&!a&&h.resizable("destroy");i&&typeof a==="string"&&h.resizable("option","handles",a);!i&&a!==false&&d._makeResizable(a);break;case "title":b(".ui-dialog-title",d.uiDialogTitlebar).html(""+(a||" "));break}b.Widget.prototype._setOption.apply(d,arguments)},_size:function(){var e=this.options,a,d,h=\nthis.uiDialog.is(":visible");this.element.show().css({width:"auto",minHeight:0,height:0});if(e.minWidth>e.width)e.width=e.minWidth;a=this.uiDialog.css({height:"auto",width:e.width}).height();d=Math.max(0,e.minHeight-a);if(e.height==="auto")if(b.support.minHeight)this.element.css({minHeight:d,height:"auto"});else{this.uiDialog.show();e=this.element.css("height","auto").height();h||this.uiDialog.hide();this.element.height(Math.max(e,d))}else this.element.height(Math.max(e.height-a,0));this.uiDialog.is(":data(resizable)")&&\nthis.uiDialog.resizable("option","minHeight",this._minHeight())}});b.extend(b.ui.dialog,{version:"1.8.9",uuid:0,maxZ:0,getTitleId:function(e){e=e.attr("id");if(!e){this.uuid+=1;e=this.uuid}return"ui-dialog-title-"+e},overlay:function(e){this.$el=b.ui.dialog.overlay.create(e)}});b.extend(b.ui.dialog.overlay,{instances:[],oldInstances:[],maxZ:0,events:b.map("focus,mousedown,mouseup,keydown,keypress,click".split(","),function(e){return e+".dialog-overlay"}).join(" "),create:function(e){if(this.instances.length===\n0){setTimeout(function(){b.ui.dialog.overlay.instances.length&&b(document).bind(b.ui.dialog.overlay.events,function(d){if(b(d.target).zIndex()<b.ui.dialog.overlay.maxZ)return false})},1);b(document).bind("keydown.dialog-overlay",function(d){if(e.options.closeOnEscape&&d.keyCode&&d.keyCode===b.ui.keyCode.ESCAPE){e.close(d);d.preventDefault()}});b(window).bind("resize.dialog-overlay",b.ui.dialog.overlay.resize)}var a=(this.oldInstances.pop()||b("<div></div>").addClass("ui-widget-overlay")).appendTo(document.body).css({width:this.width(),\nheight:this.height()});b.fn.bgiframe&&a.bgiframe();this.instances.push(a);return a},destroy:function(e){var a=b.inArray(e,this.instances);a!=-1&&this.oldInstances.push(this.instances.splice(a,1)[0]);this.instances.length===0&&b([document,window]).unbind(".dialog-overlay");e.remove();var d=0;b.each(this.instances,function(){d=Math.max(d,this.css("z-index"))});this.maxZ=d},height:function(){var e,a;if(b.browser.msie&&b.browser.version<7){e=Math.max(document.documentElement.scrollHeight,document.body.scrollHeight);\na=Math.max(document.documentElement.offsetHeight,document.body.offsetHeight);return e<a?b(window).height()+"px":e+"px"}else return b(document).height()+"px"},width:function(){var e,a;if(b.browser.msie&&b.browser.version<7){e=Math.max(document.documentElement.scrollWidth,document.body.scrollWidth);a=Math.max(document.documentElement.offsetWidth,document.body.offsetWidth);return e<a?b(window).width()+"px":e+"px"}else return b(document).width()+"px"},resize:function(){var 
e=b([]);b.each(b.ui.dialog.overlay.instances,\nfunction(){e=e.add(this)});e.css({width:0,height:0}).css({width:b.ui.dialog.overlay.width(),height:b.ui.dialog.overlay.height()})}});b.extend(b.ui.dialog.overlay.prototype,{destroy:function(){b.ui.dialog.overlay.destroy(this.$el)}})})(jQuery);\n(function(b){b.ui=b.ui||{};var c=/left|center|right/,f=/top|center|bottom/,g=b.fn.position,e=b.fn.offset;b.fn.position=function(a){if(!a||!a.of)return g.apply(this,arguments);a=b.extend({},a);var d=b(a.of),h=d[0],i=(a.collision||"flip").split(" "),j=a.offset?a.offset.split(" "):[0,0],n,q,l;if(h.nodeType===9){n=d.width();q=d.height();l={top:0,left:0}}else if(h.setTimeout){n=d.width();q=d.height();l={top:d.scrollTop(),left:d.scrollLeft()}}else if(h.preventDefault){a.at="left top";n=q=0;l={top:a.of.pageY,\nleft:a.of.pageX}}else{n=d.outerWidth();q=d.outerHeight();l=d.offset()}b.each(["my","at"],function(){var k=(a[this]||"").split(" ");if(k.length===1)k=c.test(k[0])?k.concat(["center"]):f.test(k[0])?["center"].concat(k):["center","center"];k[0]=c.test(k[0])?k[0]:"center";k[1]=f.test(k[1])?k[1]:"center";a[this]=k});if(i.length===1)i[1]=i[0];j[0]=parseInt(j[0],10)||0;if(j.length===1)j[1]=j[0];j[1]=parseInt(j[1],10)||0;if(a.at[0]==="right")l.left+=n;else if(a.at[0]==="center")l.left+=n/2;if(a.at[1]==="bottom")l.top+=\nq;else if(a.at[1]==="center")l.top+=q/2;l.left+=j[0];l.top+=j[1];return this.each(function(){var k=b(this),m=k.outerWidth(),o=k.outerHeight(),p=parseInt(b.curCSS(this,"marginLeft",true))||0,s=parseInt(b.curCSS(this,"marginTop",true))||0,r=m+p+(parseInt(b.curCSS(this,"marginRight",true))||0),u=o+s+(parseInt(b.curCSS(this,"marginBottom",true))||0),v=b.extend({},l),w;if(a.my[0]==="right")v.left-=m;else if(a.my[0]==="center")v.left-=m/2;if(a.my[1]==="bottom")v.top-=o;else if(a.my[1]==="center")v.top-=\no/2;v.left=Math.round(v.left);v.top=Math.round(v.top);w={left:v.left-p,top:v.top-s};b.each(["left","top"],function(y,B){b.ui.position[i[y]]&&b.ui.position[i[y]][B](v,{targetWidth:n,targetHeight:q,elemWidth:m,elemHeight:o,collisionPosition:w,collisionWidth:r,collisionHeight:u,offset:j,my:a.my,at:a.at})});b.fn.bgiframe&&k.bgiframe();k.offset(b.extend(v,{using:a.using}))})};b.ui.position={fit:{left:function(a,d){var h=b(window);h=d.collisionPosition.left+d.collisionWidth-h.width()-h.scrollLeft();a.left=\nh>0?a.left-h:Math.max(a.left-d.collisionPosition.left,a.left)},top:function(a,d){var h=b(window);h=d.collisionPosition.top+d.collisionHeight-h.height()-h.scrollTop();a.top=h>0?a.top-h:Math.max(a.top-d.collisionPosition.top,a.top)}},flip:{left:function(a,d){if(d.at[0]!=="center"){var h=b(window);h=d.collisionPosition.left+d.collisionWidth-h.width()-h.scrollLeft();var i=d.my[0]==="left"?-d.elemWidth:d.my[0]==="right"?d.elemWidth:0,j=d.at[0]==="left"?d.targetWidth:-d.targetWidth,n=-2*d.offset[0];a.left+=\nd.collisionPosition.left<0?i+j+n:h>0?i+j+n:0}},top:function(a,d){if(d.at[1]!=="center"){var h=b(window);h=d.collisionPosition.top+d.collisionHeight-h.height()-h.scrollTop();var i=d.my[1]==="top"?-d.elemHeight:d.my[1]==="bottom"?d.elemHeight:0,j=d.at[1]==="top"?d.targetHeight:-d.targetHeight,n=-2*d.offset[1];a.top+=d.collisionPosition.top<0?i+j+n:h>0?i+j+n:0}}}};if(!b.offset.setOffset){b.offset.setOffset=function(a,d){if(/static/.test(b.curCSS(a,"position")))a.style.position="relative";var h=b(a),\ni=h.offset(),j=parseInt(b.curCSS(a,"top",true),10)||0,n=parseInt(b.curCSS(a,"left",true),10)||0;i={top:d.top-i.top+j,left:d.left-i.left+n};"using"in d?d.using.call(a,i):h.css(i)};b.fn.offset=function(a){var 
d=this[0];if(!d||!d.ownerDocument)return null;if(a)return this.each(function(){b.offset.setOffset(this,a)});return e.call(this)}}})(jQuery);\n(function(b,c){b.widget("ui.progressbar",{options:{value:0,max:100},min:0,_create:function(){this.element.addClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").attr({role:"progressbar","aria-valuemin":this.min,"aria-valuemax":this.options.max,"aria-valuenow":this._value()});this.valueDiv=b("<div class=\'ui-progressbar-value ui-widget-header ui-corner-left\'></div>").appendTo(this.element);this.oldValue=this._value();this._refreshValue()},destroy:function(){this.element.removeClass("ui-progressbar ui-widget ui-widget-content ui-corner-all").removeAttr("role").removeAttr("aria-valuemin").removeAttr("aria-valuemax").removeAttr("aria-valuenow");\nthis.valueDiv.remove();b.Widget.prototype.destroy.apply(this,arguments)},value:function(f){if(f===c)return this._value();this._setOption("value",f);return this},_setOption:function(f,g){if(f==="value"){this.options.value=g;this._refreshValue();this._value()===this.options.max&&this._trigger("complete")}b.Widget.prototype._setOption.apply(this,arguments)},_value:function(){var f=this.options.value;if(typeof f!=="number")f=0;return Math.min(this.options.max,Math.max(this.min,f))},_percentage:function(){return 100*\nthis._value()/this.options.max},_refreshValue:function(){var f=this.value(),g=this._percentage();if(this.oldValue!==f){this.oldValue=f;this._trigger("change")}this.valueDiv.toggleClass("ui-corner-right",f===this.options.max).width(g.toFixed(0)+"%");this.element.attr("aria-valuenow",f)}});b.extend(b.ui.progressbar,{version:"1.8.9"})})(jQuery);\n(function(b){b.widget("ui.slider",b.ui.mouse,{widgetEventPrefix:"slide",options:{animate:false,distance:0,max:100,min:0,orientation:"horizontal",range:false,step:1,value:0,values:null},_create:function(){var c=this,f=this.options;this._mouseSliding=this._keySliding=false;this._animateOff=true;this._handleIndex=null;this._detectOrientation();this._mouseInit();this.element.addClass("ui-slider ui-slider-"+this.orientation+" ui-widget ui-widget-content ui-corner-all");f.disabled&&this.element.addClass("ui-slider-disabled ui-disabled");\nthis.range=b([]);if(f.range){if(f.range===true){this.range=b("<div></div>");if(!f.values)f.values=[this._valueMin(),this._valueMin()];if(f.values.length&&f.values.length!==2)f.values=[f.values[0],f.values[0]]}else this.range=b("<div></div>");this.range.appendTo(this.element).addClass("ui-slider-range");if(f.range==="min"||f.range==="max")this.range.addClass("ui-slider-range-"+f.range);this.range.addClass("ui-widget-header")}b(".ui-slider-handle",this.element).length===0&&b("<a href=\'#\'></a>").appendTo(this.element).addClass("ui-slider-handle");\nif(f.values&&f.values.length)for(;b(".ui-slider-handle",this.element).length<f.values.length;)b("<a href=\'#\'></a>").appendTo(this.element).addClass("ui-slider-handle");this.handles=b(".ui-slider-handle",this.element).addClass("ui-state-default ui-corner-all");this.handle=this.handles.eq(0);this.handles.add(this.range).filter("a").click(function(g){g.preventDefault()}).hover(function(){f.disabled||b(this).addClass("ui-state-hover")},function(){b(this).removeClass("ui-state-hover")}).focus(function(){if(f.disabled)b(this).blur();\nelse{b(".ui-slider 
.ui-state-focus").removeClass("ui-state-focus");b(this).addClass("ui-state-focus")}}).blur(function(){b(this).removeClass("ui-state-focus")});this.handles.each(function(g){b(this).data("index.ui-slider-handle",g)});this.handles.keydown(function(g){var e=true,a=b(this).data("index.ui-slider-handle"),d,h,i;if(!c.options.disabled){switch(g.keyCode){case b.ui.keyCode.HOME:case b.ui.keyCode.END:case b.ui.keyCode.PAGE_UP:case b.ui.keyCode.PAGE_DOWN:case b.ui.keyCode.UP:case b.ui.keyCode.RIGHT:case b.ui.keyCode.DOWN:case b.ui.keyCode.LEFT:e=\nfalse;if(!c._keySliding){c._keySliding=true;b(this).addClass("ui-state-active");d=c._start(g,a);if(d===false)return}break}i=c.options.step;d=c.options.values&&c.options.values.length?(h=c.values(a)):(h=c.value());switch(g.keyCode){case b.ui.keyCode.HOME:h=c._valueMin();break;case b.ui.keyCode.END:h=c._valueMax();break;case b.ui.keyCode.PAGE_UP:h=c._trimAlignValue(d+(c._valueMax()-c._valueMin())/5);break;case b.ui.keyCode.PAGE_DOWN:h=c._trimAlignValue(d-(c._valueMax()-c._valueMin())/5);break;case b.ui.keyCode.UP:case b.ui.keyCode.RIGHT:if(d===\nc._valueMax())return;h=c._trimAlignValue(d+i);break;case b.ui.keyCode.DOWN:case b.ui.keyCode.LEFT:if(d===c._valueMin())return;h=c._trimAlignValue(d-i);break}c._slide(g,a,h);return e}}).keyup(function(g){var e=b(this).data("index.ui-slider-handle");if(c._keySliding){c._keySliding=false;c._stop(g,e);c._change(g,e);b(this).removeClass("ui-state-active")}});this._refreshValue();this._animateOff=false},destroy:function(){this.handles.remove();this.range.remove();this.element.removeClass("ui-slider ui-slider-horizontal ui-slider-vertical ui-slider-disabled ui-widget ui-widget-content ui-corner-all").removeData("slider").unbind(".slider");\nthis._mouseDestroy();return this},_mouseCapture:function(c){var f=this.options,g,e,a,d,h;if(f.disabled)return false;this.elementSize={width:this.element.outerWidth(),height:this.element.outerHeight()};this.elementOffset=this.element.offset();g=this._normValueFromMouse({x:c.pageX,y:c.pageY});e=this._valueMax()-this._valueMin()+1;d=this;this.handles.each(function(i){var j=Math.abs(g-d.values(i));if(e>j){e=j;a=b(this);h=i}});if(f.range===true&&this.values(1)===f.min){h+=1;a=b(this.handles[h])}if(this._start(c,\nh)===false)return false;this._mouseSliding=true;d._handleIndex=h;a.addClass("ui-state-active").focus();f=a.offset();this._clickOffset=!b(c.target).parents().andSelf().is(".ui-slider-handle")?{left:0,top:0}:{left:c.pageX-f.left-a.width()/2,top:c.pageY-f.top-a.height()/2-(parseInt(a.css("borderTopWidth"),10)||0)-(parseInt(a.css("borderBottomWidth"),10)||0)+(parseInt(a.css("marginTop"),10)||0)};this.handles.hasClass("ui-state-hover")||this._slide(c,h,g);return this._animateOff=true},_mouseStart:function(){return true},\n_mouseDrag:function(c){var f=this._normValueFromMouse({x:c.pageX,y:c.pageY});this._slide(c,this._handleIndex,f);return false},_mouseStop:function(c){this.handles.removeClass("ui-state-active");this._mouseSliding=false;this._stop(c,this._handleIndex);this._change(c,this._handleIndex);this._clickOffset=this._handleIndex=null;return this._animateOff=false},_detectOrientation:function(){this.orientation=this.options.orientation==="vertical"?"vertical":"horizontal"},_normValueFromMouse:function(c){var 
f;\nif(this.orientation==="horizontal"){f=this.elementSize.width;c=c.x-this.elementOffset.left-(this._clickOffset?this._clickOffset.left:0)}else{f=this.elementSize.height;c=c.y-this.elementOffset.top-(this._clickOffset?this._clickOffset.top:0)}f=c/f;if(f>1)f=1;if(f<0)f=0;if(this.orientation==="vertical")f=1-f;c=this._valueMax()-this._valueMin();return this._trimAlignValue(this._valueMin()+f*c)},_start:function(c,f){var g={handle:this.handles[f],value:this.value()};if(this.options.values&&this.options.values.length){g.value=\nthis.values(f);g.values=this.values()}return this._trigger("start",c,g)},_slide:function(c,f,g){var e;if(this.options.values&&this.options.values.length){e=this.values(f?0:1);if(this.options.values.length===2&&this.options.range===true&&(f===0&&g>e||f===1&&g<e))g=e;if(g!==this.values(f)){e=this.values();e[f]=g;c=this._trigger("slide",c,{handle:this.handles[f],value:g,values:e});this.values(f?0:1);c!==false&&this.values(f,g,true)}}else if(g!==this.value()){c=this._trigger("slide",c,{handle:this.handles[f],\nvalue:g});c!==false&&this.value(g)}},_stop:function(c,f){var g={handle:this.handles[f],value:this.value()};if(this.options.values&&this.options.values.length){g.value=this.values(f);g.values=this.values()}this._trigger("stop",c,g)},_change:function(c,f){if(!this._keySliding&&!this._mouseSliding){var g={handle:this.handles[f],value:this.value()};if(this.options.values&&this.options.values.length){g.value=this.values(f);g.values=this.values()}this._trigger("change",c,g)}},value:function(c){if(arguments.length){this.options.value=\nthis._trimAlignValue(c);this._refreshValue();this._change(null,0)}return this._value()},values:function(c,f){var g,e,a;if(arguments.length>1){this.options.values[c]=this._trimAlignValue(f);this._refreshValue();this._change(null,c)}if(arguments.length)if(b.isArray(arguments[0])){g=this.options.values;e=arguments[0];for(a=0;a<g.length;a+=1){g[a]=this._trimAlignValue(e[a]);this._change(null,a)}this._refreshValue()}else return this.options.values&&this.options.values.length?this._values(c):this.value();\nelse return this._values()},_setOption:function(c,f){var g,e=0;if(b.isArray(this.options.values))e=this.options.values.length;b.Widget.prototype._setOption.apply(this,arguments);switch(c){case "disabled":if(f){this.handles.filter(".ui-state-focus").blur();this.handles.removeClass("ui-state-hover");this.handles.attr("disabled","disabled");this.element.addClass("ui-disabled")}else{this.handles.removeAttr("disabled");this.element.removeClass("ui-disabled")}break;case "orientation":this._detectOrientation();\nthis.element.removeClass("ui-slider-horizontal ui-slider-vertical").addClass("ui-slider-"+this.orientation);this._refreshValue();break;case "value":this._animateOff=true;this._refreshValue();this._change(null,0);this._animateOff=false;break;case "values":this._animateOff=true;this._refreshValue();for(g=0;g<e;g+=1)this._change(null,g);this._animateOff=false;break}},_value:function(){var c=this.options.value;return c=this._trimAlignValue(c)},_values:function(c){var f,g;if(arguments.length){f=this.options.values[c];\nreturn f=this._trimAlignValue(f)}else{f=this.options.values.slice();for(g=0;g<f.length;g+=1)f[g]=this._trimAlignValue(f[g]);return f}},_trimAlignValue:function(c){if(c<=this._valueMin())return this._valueMin();if(c>=this._valueMax())return this._valueMax();var f=this.options.step>0?this.options.step:1,g=(c-this._valueMin())%f;alignValue=c-g;if(Math.abs(g)*2>=f)alignValue+=g>0?f:-f;return 
parseFloat(alignValue.toFixed(5))},_valueMin:function(){return this.options.min},_valueMax:function(){return this.options.max},\n_refreshValue:function(){var c=this.options.range,f=this.options,g=this,e=!this._animateOff?f.animate:false,a,d={},h,i,j,n;if(this.options.values&&this.options.values.length)this.handles.each(function(q){a=(g.values(q)-g._valueMin())/(g._valueMax()-g._valueMin())*100;d[g.orientation==="horizontal"?"left":"bottom"]=a+"%";b(this).stop(1,1)[e?"animate":"css"](d,f.animate);if(g.options.range===true)if(g.orientation==="horizontal"){if(q===0)g.range.stop(1,1)[e?"animate":"css"]({left:a+"%"},f.animate);\nif(q===1)g.range[e?"animate":"css"]({width:a-h+"%"},{queue:false,duration:f.animate})}else{if(q===0)g.range.stop(1,1)[e?"animate":"css"]({bottom:a+"%"},f.animate);if(q===1)g.range[e?"animate":"css"]({height:a-h+"%"},{queue:false,duration:f.animate})}h=a});else{i=this.value();j=this._valueMin();n=this._valueMax();a=n!==j?(i-j)/(n-j)*100:0;d[g.orientation==="horizontal"?"left":"bottom"]=a+"%";this.handle.stop(1,1)[e?"animate":"css"](d,f.animate);if(c==="min"&&this.orientation==="horizontal")this.range.stop(1,\n1)[e?"animate":"css"]({width:a+"%"},f.animate);if(c==="max"&&this.orientation==="horizontal")this.range[e?"animate":"css"]({width:100-a+"%"},{queue:false,duration:f.animate});if(c==="min"&&this.orientation==="vertical")this.range.stop(1,1)[e?"animate":"css"]({height:a+"%"},f.animate);if(c==="max"&&this.orientation==="vertical")this.range[e?"animate":"css"]({height:100-a+"%"},{queue:false,duration:f.animate})}}});b.extend(b.ui.slider,{version:"1.8.9"})})(jQuery);\n(function(b,c){function f(){return++e}function g(){return++a}var e=0,a=0;b.widget("ui.tabs",{options:{add:null,ajaxOptions:null,cache:false,cookie:null,collapsible:false,disable:null,disabled:[],enable:null,event:"click",fx:null,idPrefix:"ui-tabs-",load:null,panelTemplate:"<div></div>",remove:null,select:null,show:null,spinner:"<em>Loading…</em>",tabTemplate:"<li><a href=\'#{href}\'><span>#{label}</span></a></li>"},_create:function(){this._tabify(true)},_setOption:function(d,h){if(d=="selected")this.options.collapsible&&\nh==this.options.selected||this.select(h);else{this.options[d]=h;this._tabify()}},_tabId:function(d){return d.title&&d.title.replace(/\\s/g,"_").replace(/[^\\w\\u00c0-\\uFFFF-]/g,"")||this.options.idPrefix+f()},_sanitizeSelector:function(d){return d.replace(/:/g,"\\\\:")},_cookie:function(){var d=this.cookie||(this.cookie=this.options.cookie.name||"ui-tabs-"+g());return b.cookie.apply(null,[d].concat(b.makeArray(arguments)))},_ui:function(d,h){return{tab:d,panel:h,index:this.anchors.index(d)}},_cleanup:function(){this.lis.filter(".ui-state-processing").removeClass("ui-state-processing").find("span:data(label.tabs)").each(function(){var d=\nb(this);d.html(d.data("label.tabs")).removeData("label.tabs")})},_tabify:function(d){function h(r,u){r.css("display","");!b.support.opacity&&u.opacity&&r[0].style.removeAttribute("filter")}var i=this,j=this.options,n=/^#.+/;this.list=this.element.find("ol,ul").eq(0);this.lis=b(" > li:has(a[href])",this.list);this.anchors=this.lis.map(function(){return b("a",this)[0]});this.panels=b([]);this.anchors.each(function(r,u){var v=b(u).attr("href"),w=v.split("#")[0],y;if(w&&(w===location.toString().split("#")[0]||\n(y=b("base")[0])&&w===y.href)){v=u.hash;u.href=v}if(n.test(v))i.panels=i.panels.add(i.element.find(i._sanitizeSelector(v)));else 
if(v&&v!=="#"){b.data(u,"href.tabs",v);b.data(u,"load.tabs",v.replace(/#.*$/,""));v=i._tabId(u);u.href="#"+v;u=i.element.find("#"+v);if(!u.length){u=b(j.panelTemplate).attr("id",v).addClass("ui-tabs-panel ui-widget-content ui-corner-bottom").insertAfter(i.panels[r-1]||i.list);u.data("destroy.tabs",true)}i.panels=i.panels.add(u)}else j.disabled.push(r)});if(d){this.element.addClass("ui-tabs ui-widget ui-widget-content ui-corner-all");\nthis.list.addClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all");this.lis.addClass("ui-state-default ui-corner-top");this.panels.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom");if(j.selected===c){location.hash&&this.anchors.each(function(r,u){if(u.hash==location.hash){j.selected=r;return false}});if(typeof j.selected!=="number"&&j.cookie)j.selected=parseInt(i._cookie(),10);if(typeof j.selected!=="number"&&this.lis.filter(".ui-tabs-selected").length)j.selected=\nthis.lis.index(this.lis.filter(".ui-tabs-selected"));j.selected=j.selected||(this.lis.length?0:-1)}else if(j.selected===null)j.selected=-1;j.selected=j.selected>=0&&this.anchors[j.selected]||j.selected<0?j.selected:0;j.disabled=b.unique(j.disabled.concat(b.map(this.lis.filter(".ui-state-disabled"),function(r){return i.lis.index(r)}))).sort();b.inArray(j.selected,j.disabled)!=-1&&j.disabled.splice(b.inArray(j.selected,j.disabled),1);this.panels.addClass("ui-tabs-hide");this.lis.removeClass("ui-tabs-selected ui-state-active");\nif(j.selected>=0&&this.anchors.length){i.element.find(i._sanitizeSelector(i.anchors[j.selected].hash)).removeClass("ui-tabs-hide");this.lis.eq(j.selected).addClass("ui-tabs-selected ui-state-active");i.element.queue("tabs",function(){i._trigger("show",null,i._ui(i.anchors[j.selected],i.element.find(i._sanitizeSelector(i.anchors[j.selected].hash))[0]))});this.load(j.selected)}b(window).bind("unload",function(){i.lis.add(i.anchors).unbind(".tabs");i.lis=i.anchors=i.panels=null})}else j.selected=this.lis.index(this.lis.filter(".ui-tabs-selected"));\nthis.element[j.collapsible?"addClass":"removeClass"]("ui-tabs-collapsible");j.cookie&&this._cookie(j.selected,j.cookie);d=0;for(var q;q=this.lis[d];d++)b(q)[b.inArray(d,j.disabled)!=-1&&!b(q).hasClass("ui-tabs-selected")?"addClass":"removeClass"]("ui-state-disabled");j.cache===false&&this.anchors.removeData("cache.tabs");this.lis.add(this.anchors).unbind(".tabs");if(j.event!=="mouseover"){var l=function(r,u){u.is(":not(.ui-state-disabled)")&&u.addClass("ui-state-"+r)},k=function(r,u){u.removeClass("ui-state-"+\nr)};this.lis.bind("mouseover.tabs",function(){l("hover",b(this))});this.lis.bind("mouseout.tabs",function(){k("hover",b(this))});this.anchors.bind("focus.tabs",function(){l("focus",b(this).closest("li"))});this.anchors.bind("blur.tabs",function(){k("focus",b(this).closest("li"))})}var m,o;if(j.fx)if(b.isArray(j.fx)){m=j.fx[0];o=j.fx[1]}else m=o=j.fx;var p=o?function(r,u){b(r).closest("li").addClass("ui-tabs-selected ui-state-active");u.hide().removeClass("ui-tabs-hide").animate(o,o.duration||"normal",\nfunction(){h(u,o);i._trigger("show",null,i._ui(r,u[0]))})}:function(r,u){b(r).closest("li").addClass("ui-tabs-selected ui-state-active");u.removeClass("ui-tabs-hide");i._trigger("show",null,i._ui(r,u[0]))},s=m?function(r,u){u.animate(m,m.duration||"normal",function(){i.lis.removeClass("ui-tabs-selected ui-state-active");u.addClass("ui-tabs-hide");h(u,m);i.element.dequeue("tabs")})}:function(r,u){i.lis.removeClass("ui-tabs-selected 
ui-state-active");u.addClass("ui-tabs-hide");i.element.dequeue("tabs")};\nthis.anchors.bind(j.event+".tabs",function(){var r=this,u=b(r).closest("li"),v=i.panels.filter(":not(.ui-tabs-hide)"),w=i.element.find(i._sanitizeSelector(r.hash));if(u.hasClass("ui-tabs-selected")&&!j.collapsible||u.hasClass("ui-state-disabled")||u.hasClass("ui-state-processing")||i.panels.filter(":animated").length||i._trigger("select",null,i._ui(this,w[0]))===false){this.blur();return false}j.selected=i.anchors.index(this);i.abort();if(j.collapsible)if(u.hasClass("ui-tabs-selected")){j.selected=\n-1;j.cookie&&i._cookie(j.selected,j.cookie);i.element.queue("tabs",function(){s(r,v)}).dequeue("tabs");this.blur();return false}else if(!v.length){j.cookie&&i._cookie(j.selected,j.cookie);i.element.queue("tabs",function(){p(r,w)});i.load(i.anchors.index(this));this.blur();return false}j.cookie&&i._cookie(j.selected,j.cookie);if(w.length){v.length&&i.element.queue("tabs",function(){s(r,v)});i.element.queue("tabs",function(){p(r,w)});i.load(i.anchors.index(this))}else throw"jQuery UI Tabs: Mismatching fragment identifier.";\nb.browser.msie&&this.blur()});this.anchors.bind("click.tabs",function(){return false})},_getIndex:function(d){if(typeof d=="string")d=this.anchors.index(this.anchors.filter("[href$="+d+"]"));return d},destroy:function(){var d=this.options;this.abort();this.element.unbind(".tabs").removeClass("ui-tabs ui-widget ui-widget-content ui-corner-all ui-tabs-collapsible").removeData("tabs");this.list.removeClass("ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all");this.anchors.each(function(){var h=\nb.data(this,"href.tabs");if(h)this.href=h;var i=b(this).unbind(".tabs");b.each(["href","load","cache"],function(j,n){i.removeData(n+".tabs")})});this.lis.unbind(".tabs").add(this.panels).each(function(){b.data(this,"destroy.tabs")?b(this).remove():b(this).removeClass("ui-state-default ui-corner-top ui-tabs-selected ui-state-active ui-state-hover ui-state-focus ui-state-disabled ui-tabs-panel ui-widget-content ui-corner-bottom ui-tabs-hide")});d.cookie&&this._cookie(null,d.cookie);return this},add:function(d,\nh,i){if(i===c)i=this.anchors.length;var j=this,n=this.options;h=b(n.tabTemplate.replace(/#\\{href\\}/g,d).replace(/#\\{label\\}/g,h));d=!d.indexOf("#")?d.replace("#",""):this._tabId(b("a",h)[0]);h.addClass("ui-state-default ui-corner-top").data("destroy.tabs",true);var q=j.element.find("#"+d);q.length||(q=b(n.panelTemplate).attr("id",d).data("destroy.tabs",true));q.addClass("ui-tabs-panel ui-widget-content ui-corner-bottom ui-tabs-hide");if(i>=this.lis.length){h.appendTo(this.list);q.appendTo(this.list[0].parentNode)}else{h.insertBefore(this.lis[i]);\nq.insertBefore(this.panels[i])}n.disabled=b.map(n.disabled,function(l){return l>=i?++l:l});this._tabify();if(this.anchors.length==1){n.selected=0;h.addClass("ui-tabs-selected ui-state-active");q.removeClass("ui-tabs-hide");this.element.queue("tabs",function(){j._trigger("show",null,j._ui(j.anchors[0],j.panels[0]))});this.load(0)}this._trigger("add",null,this._ui(this.anchors[i],this.panels[i]));return this},remove:function(d){d=this._getIndex(d);var h=this.options,i=this.lis.eq(d).remove(),j=this.panels.eq(d).remove();\nif(i.hasClass("ui-tabs-selected")&&this.anchors.length>1)this.select(d+(d+1<this.anchors.length?1:-1));h.disabled=b.map(b.grep(h.disabled,function(n){return n!=d}),function(n){return n>=d?--n:n});this._tabify();this._trigger("remove",null,this._ui(i.find("a")[0],j[0]));return 
this},enable:function(d){d=this._getIndex(d);var h=this.options;if(b.inArray(d,h.disabled)!=-1){this.lis.eq(d).removeClass("ui-state-disabled");h.disabled=b.grep(h.disabled,function(i){return i!=d});this._trigger("enable",null,\nthis._ui(this.anchors[d],this.panels[d]));return this}},disable:function(d){d=this._getIndex(d);var h=this.options;if(d!=h.selected){this.lis.eq(d).addClass("ui-state-disabled");h.disabled.push(d);h.disabled.sort();this._trigger("disable",null,this._ui(this.anchors[d],this.panels[d]))}return this},select:function(d){d=this._getIndex(d);if(d==-1)if(this.options.collapsible&&this.options.selected!=-1)d=this.options.selected;else return this;this.anchors.eq(d).trigger(this.options.event+".tabs");return this},\nload:function(d){d=this._getIndex(d);var h=this,i=this.options,j=this.anchors.eq(d)[0],n=b.data(j,"load.tabs");this.abort();if(!n||this.element.queue("tabs").length!==0&&b.data(j,"cache.tabs"))this.element.dequeue("tabs");else{this.lis.eq(d).addClass("ui-state-processing");if(i.spinner){var q=b("span",j);q.data("label.tabs",q.html()).html(i.spinner)}this.xhr=b.ajax(b.extend({},i.ajaxOptions,{url:n,success:function(l,k){h.element.find(h._sanitizeSelector(j.hash)).html(l);h._cleanup();i.cache&&b.data(j,\n"cache.tabs",true);h._trigger("load",null,h._ui(h.anchors[d],h.panels[d]));try{i.ajaxOptions.success(l,k)}catch(m){}},error:function(l,k){h._cleanup();h._trigger("load",null,h._ui(h.anchors[d],h.panels[d]));try{i.ajaxOptions.error(l,k,d,j)}catch(m){}}}));h.element.dequeue("tabs");return this}},abort:function(){this.element.queue([]);this.panels.stop(false,true);this.element.queue("tabs",this.element.queue("tabs").splice(-2,2));if(this.xhr){this.xhr.abort();delete this.xhr}this._cleanup();return this},\nurl:function(d,h){this.anchors.eq(d).removeData("cache.tabs").data("load.tabs",h);return this},length:function(){return this.anchors.length}});b.extend(b.ui.tabs,{version:"1.8.9"});b.extend(b.ui.tabs.prototype,{rotation:null,rotate:function(d,h){var i=this,j=this.options,n=i._rotate||(i._rotate=function(q){clearTimeout(i.rotation);i.rotation=setTimeout(function(){var l=j.selected;i.select(++l<i.anchors.length?l:0)},d);q&&q.stopPropagation()});h=i._unrotate||(i._unrotate=!h?function(q){q.clientX&&\ni.rotate(null)}:function(){t=j.selected;n()});if(d){this.element.bind("tabsshow",n);this.anchors.bind(j.event+".tabs",h);n()}else{clearTimeout(i.rotation);this.element.unbind("tabsshow",n);this.anchors.unbind(j.event+".tabs",h);delete this._rotate;delete this._unrotate}return this}})})(jQuery);\n
'''
browserSel = '''/* CSS Browser Selector v0.4.0 (Nov 02, 2010) Rafael Lima (http://rafael.adm.br) */function css_browser_selector(u){var ua=u.toLowerCase(),is=function(t){return ua.indexOf(t)>-1},g='gecko',w='webkit',s='safari',o='opera',m='mobile',h=document.documentElement,b=[(!(/opera|webtv/i.test(ua))&&/msie\s(\d)/.test(ua))?('ie ie'+RegExp.$1):is('firefox/2')?g+' ff2':is('firefox/3.5')?g+' ff3 ff3_5':is('firefox/3.6')?g+' ff3 ff3_6':is('firefox/3')?g+' ff3':is('gecko/')?g:is('opera')?o+(/version\/(\d+)/.test(ua)?' '+o+RegExp.$1:(/opera(\s|\/)(\d+)/.test(ua)?' '+o+RegExp.$2:'')):is('konqueror')?'konqueror':is('blackberry')?m+' blackberry':is('android')?m+' android':is('chrome')?w+' chrome':is('iron')?w+' iron':is('applewebkit/')?w+' '+s+(/version\/(\d+)/.test(ua)?' '+s+RegExp.$1:''):is('mozilla/')?g:'',is('j2me')?m+' j2me':is('iphone')?m+' iphone':is('ipod')?m+' ipod':is('ipad')?m+' ipad':is('mac')?'mac':is('darwin')?'mac':is('webtv')?'webtv':is('win')?'win'+(is('windows nt 6.0')?' vista':''):is('freebsd')?'freebsd':(is('x11')||is('linux'))?'linux':'','js']; c = b.join(' '); h.className += ' '+c; return c;}; css_browser_selector(navigator.userAgent);'''
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/js.py
|
js.py
|
import copy
from anki.utils import intTime, ids2str, json
from anki.hooks import runHook
from anki.consts import *
from anki.lang import _
from anki.errors import DeckRenameError

# fixmes:
# - make sure users can't set grad interval < 1

defaultDeck = {
    'newToday': [0, 0], # currentDay, count
    'revToday': [0, 0],
    'lrnToday': [0, 0],
    'timeToday': [0, 0], # time in ms
    'conf': 1,
    'usn': 0,
    'desc': "",
    'dyn': 0, # anki uses int/bool interchangeably here
    'collapsed': False,
    # added in beta11
    'extendNew': 10,
    'extendRev': 50,
}

defaultDynamicDeck = {
    'newToday': [0, 0],
    'revToday': [0, 0],
    'lrnToday': [0, 0],
    'timeToday': [0, 0],
    'collapsed': False,
    'dyn': 1,
    'desc': "",
    'usn': 0,
    'delays': None,
    'separate': True,
    # list of (search, limit, order); we only use first element for now
    'terms': [["", 100, 0]],
    'resched': True,
    'return': True, # currently unused
}
defaultConf = {
    'name': _("Default"),
    'new': {
        'delays': [1, 10],
        'ints': [1, 4, 7], # 7 is not currently used
        'initialFactor': 2500,
        'separate': True,
        'order': NEW_CARDS_DUE,
        'perDay': 20,
        # may not be set on old decks
        'bury': True,
    },
    'lapse': {
        'delays': [10],
        'mult': 0,
        'minInt': 1,
        'leechFails': 8,
        # type 0=suspend, 1=tagonly
        'leechAction': 0,
    },
    'rev': {
        'perDay': 100,
        'ease4': 1.3,
        'fuzz': 0.05,
        'minSpace': 1, # not currently used
        'ivlFct': 1,
        'maxIvl': 36500,
        # may not be set on old decks
        'bury': True,
    },
    'maxTaken': 60,
    'timer': 0,
    'autoplay': True,
    'replayq': True,
    'mod': 0,
    'usn': 0,
}
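
# Illustrative sketch (not part of the original anki source): new option
# groups are deep copies of defaultConf, exactly as DeckManager.confId()
# does below; a shallow copy would share the nested 'new'/'lapse'/'rev'
# dicts between configurations.
def _exampleCloneConf(name):
    c = copy.deepcopy(defaultConf)
    c['name'] = name
    c['new']['perDay'] = 40  # hypothetical tweak; does not touch defaultConf
    return c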
class DeckManager(object):

    # Registry save/load
    #############################################################

    def __init__(self, col):
        self.col = col

    def load(self, decks, dconf):
        self.decks = json.loads(decks)
        self.dconf = json.loads(dconf)
        self.changed = False

    def save(self, g=None):
        "Can be called with either a deck or a deck configuration."
        if g:
            g['mod'] = intTime()
            g['usn'] = self.col.usn()
        self.changed = True

    def flush(self):
        if self.changed:
            self.col.db.execute("update col set decks=?, dconf=?",
                                json.dumps(self.decks),
                                json.dumps(self.dconf))
            self.changed = False

    # Deck save/load
    #############################################################

    def id(self, name, create=True, type=defaultDeck):
        "Add a deck with NAME. Reuse deck if already exists. Return id as int."
        name = name.replace('"', '')
        for id, g in self.decks.items():
            if g['name'].lower() == name.lower():
                return int(id)
        if not create:
            return None
        g = copy.deepcopy(type)
        if "::" in name:
            # not top level; ensure all parents exist
            name = self._ensureParents(name)
        g['name'] = name
        while 1:
            id = intTime(1000)
            if str(id) not in self.decks:
                break
        g['id'] = id
        self.decks[str(id)] = g
        self.save(g)
        self.maybeAddToActive()
        runHook("newDeck")
        return int(id)
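
    # Illustrative sketch (not part of the original anki source): id() matches
    # existing decks case-insensitively and auto-creates "::"-separated parents
    # via _ensureParents(), so both calls below return the same did.
    def _exampleId(self):
        did = self.id("Languages::Japanese")  # also creates "Languages" if missing
        assert self.id("languages::JAPANESE") == did
        return did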
    def rem(self, did, cardsToo=False, childrenToo=True):
        "Remove the deck. If cardsToo, delete any cards inside."
        if str(did) == '1':
            # we won't allow the default deck to be deleted, but if it's a
            # child of an existing deck then it needs to be renamed
            deck = self.get(did)
            if '::' in deck['name']:
                deck['name'] = _("Default")
                self.save(deck)
            return
        # log the removal regardless of whether we have the deck or not
        self.col._logRem([did], REM_DECK)
        # do nothing else if doesn't exist
        if not str(did) in self.decks:
            return
        deck = self.get(did)
        if deck['dyn']:
            # deleting a cramming deck returns cards to their previous deck
            # rather than deleting the cards
            self.col.sched.emptyDyn(did)
            if childrenToo:
                for name, id in self.children(did):
                    self.rem(id, cardsToo)
        else:
            # delete children first
            if childrenToo:
                # we don't want to delete children when syncing
                for name, id in self.children(did):
                    self.rem(id, cardsToo)
            # delete cards too?
            if cardsToo:
                # don't use cids(), as we want cards in cram decks too
                cids = self.col.db.list(
                    "select id from cards where did=? or odid=?", did, did)
                self.col.remCards(cids)
        # delete the deck and add a grave
        del self.decks[str(did)]
        # ensure we have an active deck
        if did in self.active():
            self.select(int(self.decks.keys()[0]))
        self.save()
    def allNames(self, dyn=True):
        "An unsorted list of all deck names."
        if dyn:
            return [x['name'] for x in self.decks.values()]
        else:
            return [x['name'] for x in self.decks.values() if not x['dyn']]

    def all(self):
        "A list of all decks."
        return self.decks.values()

    def allIds(self):
        return self.decks.keys()

    def collapse(self, did):
        deck = self.get(did)
        deck['collapsed'] = not deck['collapsed']
        self.save(deck)

    def collapseBrowser(self, did):
        deck = self.get(did)
        collapsed = deck.get('browserCollapsed', False)
        deck['browserCollapsed'] = not collapsed
        self.save(deck)

    def count(self):
        return len(self.decks)

    def get(self, did, default=True):
        id = str(did)
        if id in self.decks:
            return self.decks[id]
        elif default:
            return self.decks['1']

    def byName(self, name):
        "Get deck with NAME."
        for m in self.decks.values():
            if m['name'] == name:
                return m

    def update(self, g):
        "Add or update an existing deck. Used for syncing and merging."
        self.decks[str(g['id'])] = g
        self.maybeAddToActive()
        # mark registry changed, but don't bump mod time
        self.save()

    def rename(self, g, newName):
        "Rename deck prefix to NAME if it doesn't already exist. Updates children."
        # make sure target node doesn't already exist
        if newName in self.allNames():
            raise DeckRenameError(_("That deck already exists."))
        # ensure we have parents
        newName = self._ensureParents(newName)
        # make sure we're not nesting under a filtered deck
        if '::' in newName:
            newParent = '::'.join(newName.split('::')[:-1])
            if self.byName(newParent)['dyn']:
                raise DeckRenameError(_("A filtered deck cannot have subdecks."))
        # rename children
        for grp in self.all():
            if grp['name'].startswith(g['name'] + "::"):
                grp['name'] = grp['name'].replace(g['name'] + "::",
                                                  newName + "::", 1)
                self.save(grp)
        # adjust name
        g['name'] = newName
        # ensure we have parents again, as we may have renamed parent->child
        newName = self._ensureParents(newName)
        self.save(g)
        # renaming may have altered active did order
        self.maybeAddToActive()

    def renameForDragAndDrop(self, draggedDeckDid, ontoDeckDid):
        draggedDeck = self.get(draggedDeckDid)
        draggedDeckName = draggedDeck['name']
        ontoDeckName = self.get(ontoDeckDid)['name']
        if ontoDeckDid is None or ontoDeckDid == '':
            if len(self._path(draggedDeckName)) > 1:
                self.rename(draggedDeck, self._basename(draggedDeckName))
        elif self._canDragAndDrop(draggedDeckName, ontoDeckName):
            draggedDeck = self.get(draggedDeckDid)
            draggedDeckName = draggedDeck['name']
            ontoDeckName = self.get(ontoDeckDid)['name']
            self.rename(draggedDeck, ontoDeckName + "::" + self._basename(draggedDeckName))

    def _canDragAndDrop(self, draggedDeckName, ontoDeckName):
        if draggedDeckName == ontoDeckName \
                or self._isParent(ontoDeckName, draggedDeckName) \
                or self._isAncestor(draggedDeckName, ontoDeckName):
            return False
        else:
            return True

    def _isParent(self, parentDeckName, childDeckName):
        return self._path(childDeckName) == self._path(parentDeckName) + [self._basename(childDeckName)]

    def _isAncestor(self, ancestorDeckName, descendantDeckName):
        ancestorPath = self._path(ancestorDeckName)
        return ancestorPath == self._path(descendantDeckName)[0:len(ancestorPath)]

    def _path(self, name):
        return name.split("::")

    def _basename(self, name):
        return self._path(name)[-1]

    def _ensureParents(self, name):
        "Ensure parents exist, and return name with case matching parents."
        s = ""
        path = self._path(name)
        if len(path) < 2:
            return name
        for p in path[:-1]:
            if not s:
                s += p
            else:
                s += "::" + p
            # fetch or create
            did = self.id(s)
            # get original case
            s = self.name(did)
        name = s + "::" + path[-1]
        return name
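
    # Illustrative sketch (not part of the original anki source): how the
    # "::"-separated naming helpers above relate to each other.
    def _exampleNameHelpers(self):
        assert self._path("A::B::C") == ["A", "B", "C"]
        assert self._basename("A::B::C") == "C"
        assert self._isParent("A::B", "A::B::C")
        assert self._isAncestor("A", "A::B::C")
        # _ensureParents("a::b::New") creates "a" and "a::b" if missing and
        # returns the name with the parents' stored casing, e.g. "A::B::New".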
    # Deck configurations
    #############################################################

    def allConf(self):
        "A list of all deck config."
        return self.dconf.values()

    def confForDid(self, did):
        deck = self.get(did, default=False)
        assert deck
        if 'conf' in deck:
            conf = self.getConf(deck['conf'])
            conf['dyn'] = False
            return conf
        # dynamic decks have embedded conf
        return deck

    def getConf(self, confId):
        return self.dconf[str(confId)]

    def updateConf(self, g):
        self.dconf[str(g['id'])] = g
        self.save()

    def confId(self, name, cloneFrom=defaultConf):
        "Create a new configuration and return id."
        c = copy.deepcopy(cloneFrom)
        while 1:
            id = intTime(1000)
            if str(id) not in self.dconf:
                break
        c['id'] = id
        c['name'] = name
        self.dconf[str(id)] = c
        self.save(c)
        return id

    def remConf(self, id):
        "Remove a configuration and update all decks using it."
        assert int(id) != 1
        self.col.modSchema(check=True)
        del self.dconf[str(id)]
        for g in self.all():
            # ignore cram decks
            if 'conf' not in g:
                continue
            if str(g['conf']) == str(id):
                g['conf'] = 1
                self.save(g)

    def setConf(self, grp, id):
        grp['conf'] = id
        self.save(grp)

    def didsForConf(self, conf):
        dids = []
        for deck in self.decks.values():
            if 'conf' in deck and deck['conf'] == conf['id']:
                dids.append(deck['id'])
        return dids

    def restoreToDefault(self, conf):
        oldOrder = conf['new']['order']
        new = copy.deepcopy(defaultConf)
        new['id'] = conf['id']
        new['name'] = conf['name']
        self.dconf[str(conf['id'])] = new
        self.save(new)
        # if it was previously randomized, resort
        if not oldOrder:
            self.col.sched.resortConf(new)
    # Deck utils
    #############################################################

    def name(self, did, default=False):
        deck = self.get(did, default=default)
        if deck:
            return deck['name']
        return _("[no deck]")

    def nameOrNone(self, did):
        deck = self.get(did, default=False)
        if deck:
            return deck['name']
        return None

    def setDeck(self, cids, did):
        self.col.db.execute(
            "update cards set did=?,usn=?,mod=? where id in " +
            ids2str(cids), did, self.col.usn(), intTime())

    def maybeAddToActive(self):
        # reselect current deck, or default if current has disappeared
        c = self.current()
        self.select(c['id'])

    def cids(self, did, children=False):
        if not children:
            return self.col.db.list("select id from cards where did=?", did)
        dids = [did]
        for name, id in self.children(did):
            dids.append(id)
        return self.col.db.list("select id from cards where did in " +
                                ids2str(dids))

    def recoverOrphans(self):
        dids = self.decks.keys()
        mod = self.col.db.mod
        self.col.db.execute("update cards set did = 1 where did not in " +
                            ids2str(dids))
        self.col.db.mod = mod

    # Deck selection
    #############################################################

    def active(self):
        "The currently active dids. Make sure to copy before modifying."
        return self.col.conf['activeDecks']

    def selected(self):
        "The currently selected did."
        return self.col.conf['curDeck']

    def current(self):
        return self.get(self.selected())

    def select(self, did):
        "Select a new branch."
        # make sure arg is an int
        did = int(did)
        # current deck
        self.col.conf['curDeck'] = did
        # and active decks (current + all children)
        actv = self.children(did)
        actv.sort()
        self.col.conf['activeDecks'] = [did] + [a[1] for a in actv]
        self.changed = True

    def children(self, did):
        "All children of did, as (name, id)."
        name = self.get(did)['name']
        actv = []
        for g in self.all():
            if g['name'].startswith(name + "::"):
                actv.append((g['name'], g['id']))
        return actv

    def parents(self, did):
        "All parents of did."
        # get parent and grandparent names
        parents = []
        for part in self.get(did)['name'].split("::")[:-1]:
            if not parents:
                parents.append(part)
            else:
                parents.append(parents[-1] + "::" + part)
        # convert to objects
        for c, p in enumerate(parents):
            parents[c] = self.get(self.id(p))
        return parents

    # Sync handling
    ##########################################################################

    def beforeUpload(self):
        for d in self.all():
            d['usn'] = 0
        for c in self.allConf():
            c['usn'] = 0
        self.save()

    # Dynamic decks
    ##########################################################################

    def newDyn(self, name):
        "Return a new dynamic deck and set it as the current deck."
        did = self.id(name, type=defaultDynamicDeck)
        self.select(did)
        return did

    def isDyn(self, did):
        return self.get(did)['dyn']
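
# Usage sketch (illustrative, not part of the original anki source); assumes
# `col` is an open anki Collection, whose .decks attribute is a DeckManager.
def _exampleUsage(col):
    did = col.decks.id("Languages::Japanese")  # create (or fetch) a nested deck
    col.decks.select(did)                      # did plus all children become active
    return sorted(col.decks.allNames())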
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/decks.py
|
decks.py
|
import re, os, zipfile, shutil
from anki.lang import _
from anki.utils import ids2str, splitFields, json
from anki.hooks import runHook
from anki import Collection

class Exporter(object):
    def __init__(self, col, did=None):
        self.col = col
        self.did = did

    def exportInto(self, path):
        self._escapeCount = 0
        file = open(path, "wb")
        self.doExport(file)
        file.close()

    def escapeText(self, text):
        "Escape newlines, tabs, CSS and quotechar."
        text = text.replace("\n", "<br>")
        text = text.replace("\t", " " * 8)
        text = re.sub("(?i)<style>.*?</style>", "", text)
        if "\"" in text:
            text = "\"" + text.replace("\"", "\"\"") + "\""
        return text
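
    # Illustrative sketch (not part of the original anki source): escapeText()
    # produces TSV-safe fields by flattening newlines/tabs and CSV-style
    # doubling of embedded quotes.
    def _exampleEscape(self):
        assert self.escapeText("a\tb") == "a" + " " * 8 + "b"
        assert self.escapeText('say "hi"') == '"say ""hi"""'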
def cardIds(self):
if not self.did:
cids = self.col.db.list("select id from cards")
else:
cids = self.col.decks.cids(self.did, children=True)
self.count = len(cids)
return cids
# Cards as TSV
######################################################################
class TextCardExporter(Exporter):
key = _("Cards in Plain Text")
ext = ".txt"
hideTags = True
def __init__(self, col):
Exporter.__init__(self, col)
def doExport(self, file):
ids = sorted(self.cardIds())
strids = ids2str(ids)
def esc(s):
# strip off the repeated question in answer if exists
s = re.sub("(?si)^.*<hr id=answer>\n*", "", s)
return self.escapeText(s)
out = ""
for cid in ids:
c = self.col.getCard(cid)
out += esc(c.q())
out += "\t" + esc(c.a()) + "\n"
file.write(out.encode("utf-8"))
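# Minimal usage sketch (assumes an open Collection `col`; the output path is
# hypothetical). Leave did as None to export every deck:
#
#   exp = TextCardExporter(col)
#   exp.exportInto("/tmp/cards.txt")  # tab-separated question/answer lines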
# Notes as TSV
######################################################################
class TextNoteExporter(Exporter):
key = _("Notes in Plain Text")
ext = ".txt"
def __init__(self, col):
Exporter.__init__(self, col)
self.includeID = False
self.includeTags = True
def doExport(self, file):
cardIds = self.cardIds()
data = []
for id, flds, tags in self.col.db.execute("""
select guid, flds, tags from notes
where id in
(select nid from cards
where cards.id in %s)""" % ids2str(cardIds)):
row = []
# note id
if self.includeID:
row.append(str(id))
# fields
row.extend([self.escapeText(f) for f in splitFields(flds)])
# tags
if self.includeTags:
row.append(tags.strip())
data.append("\t".join(row))
self.count = len(data)
out = "\n".join(data)
file.write(out.encode("utf-8"))
# Anki decks
######################################################################
# media files are stored in self.mediaFiles, but not exported.
class AnkiExporter(Exporter):
key = _("Anki 2.0 Deck")
ext = ".anki2"
def __init__(self, col):
Exporter.__init__(self, col)
self.includeSched = False
self.includeMedia = True
def exportInto(self, path):
# create a new collection at the target
try:
os.unlink(path)
except (IOError, OSError):
pass
self.dst = Collection(path)
self.src = self.col
# find cards
if not self.did:
cids = self.src.db.list("select id from cards")
else:
cids = self.src.decks.cids(self.did, children=True)
# copy cards, noting used nids
nids = {}
data = []
for row in self.src.db.execute(
"select * from cards where id in "+ids2str(cids)):
nids[row[1]] = True
data.append(row)
self.dst.db.executemany(
"insert into cards values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
data)
# notes
strnids = ids2str(nids.keys())
notedata = []
for row in self.src.db.all(
"select * from notes where id in "+strnids):
# remove system tags if not exporting scheduling info
if not self.includeSched:
row = list(row)
row[5] = self.removeSystemTags(row[5])
notedata.append(row)
self.dst.db.executemany(
"insert into notes values (?,?,?,?,?,?,?,?,?,?,?)",
notedata)
# models used by the notes
mids = self.dst.db.list("select distinct mid from notes where id in "+
strnids)
# card history and revlog
if self.includeSched:
data = self.src.db.all(
"select * from revlog where cid in "+ids2str(cids))
self.dst.db.executemany(
"insert into revlog values (?,?,?,?,?,?,?,?,?)",
data)
else:
# need to reset card state
self.dst.sched.resetCards(cids)
# models - start with zero
self.dst.models.models = {}
for m in self.src.models.all():
if int(m['id']) in mids:
self.dst.models.update(m)
# decks
if not self.did:
dids = []
else:
dids = [self.did] + [
x[1] for x in self.src.decks.children(self.did)]
dconfs = {}
for d in self.src.decks.all():
if str(d['id']) == "1":
continue
if dids and d['id'] not in dids:
continue
if not d['dyn'] and d['conf'] != 1:
if self.includeSched:
dconfs[d['conf']] = True
if not self.includeSched:
# scheduling not included, so reset deck settings to default
d = dict(d)
d['conf'] = 1
self.dst.decks.update(d)
# copy used deck confs
for dc in self.src.decks.allConf():
if dc['id'] in dconfs:
self.dst.decks.updateConf(dc)
# find used media
media = {}
self.mediaDir = self.src.media.dir()
if self.includeMedia:
for row in notedata:
flds = row[6]
mid = row[2]
for file in self.src.media.filesInStr(mid, flds):
media[file] = True
if self.mediaDir:
for fname in os.listdir(self.mediaDir):
if fname.startswith("_"):
media[fname] = True
self.mediaFiles = media.keys()
self.dst.crt = self.src.crt
# todo: tags?
self.count = self.dst.cardCount()
self.dst.setMod()
self.postExport()
self.dst.close()
def postExport(self):
# override this to apply customizations to the deck before it's closed,
# such as updating the deck description
pass
def removeSystemTags(self, tags):
return self.src.tags.remFromStr("marked leech", tags)
# Packaged Anki decks
######################################################################
class AnkiPackageExporter(AnkiExporter):
key = _("Anki Deck Package")
ext = ".apkg"
def __init__(self, col):
AnkiExporter.__init__(self, col)
def exportInto(self, path):
# open a zip file
z = zipfile.ZipFile(path, "w", zipfile.ZIP_DEFLATED)
# if all decks and scheduling included, full export
if self.includeSched and not self.did:
media = self.exportVerbatim(z)
else:
# otherwise, filter
media = self.exportFiltered(z, path)
# media map
z.writestr("media", json.dumps(media))
z.close()
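# The resulting .apkg is a plain zip: "collection.anki2", media files stored
# under numeric names, and a "media" entry mapping number -> original
# filename. Hedged sketch of reading one back (the path is hypothetical):
#
#   import json, zipfile
#   z = zipfile.ZipFile("/tmp/deck.apkg")
#   media_map = json.loads(z.read("media"))  # e.g. {"0": "dog.jpg"}
#   data = z.read("0")                       # raw bytes of dog.jpg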
def exportFiltered(self, z, path):
# export into the anki2 file
colfile = path.replace(".apkg", ".anki2")
AnkiExporter.exportInto(self, colfile)
z.write(colfile, "collection.anki2")
# and media
self.prepareMedia()
media = {}
for c, file in enumerate(self.mediaFiles):
c = str(c)
mpath = os.path.join(self.mediaDir, file)
if os.path.exists(mpath):
z.write(mpath, c)
media[c] = file
# tidy up intermediate files
os.unlink(colfile)
p = path.replace(".apkg", ".media.db2")
if os.path.exists(p):
os.unlink(p)
os.chdir(self.mediaDir)
shutil.rmtree(path.replace(".apkg", ".media"))
return media
def exportVerbatim(self, z):
# close our deck & write it into the zip file, and reopen
self.count = self.col.cardCount()
self.col.close()
z.write(self.col.path, "collection.anki2")
self.col.reopen()
# copy all media
if not self.includeMedia:
return {}
media = {}
mdir = self.col.media.dir()
for c, file in enumerate(os.listdir(mdir)):
c = str(c)
mpath = os.path.join(mdir, file)
if os.path.exists(mpath):
z.write(mpath, c)
media[c] = file
return media
def prepareMedia(self):
# chance to move each file in self.mediaFiles into place before media
# is zipped up
pass
# Export modules
##########################################################################
def exporters():
def id(obj):
return ("%s (*%s)" % (obj.key, obj.ext), obj)
exps = [
id(AnkiPackageExporter),
id(TextNoteExporter),
id(TextCardExporter),
]
runHook("exportersList", exps)
return exps
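# Usage sketch: each registry entry is a (menu label, exporter class) pair,
# so GUI code can build a file-type chooser from it:
#
#   for label, cls in exporters():
#       print label  # e.g. "Anki Deck Package (*.apkg)"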
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/exporting.py
|
exporting.py
|
import copy, re
from anki.utils import intTime, joinFields, splitFields, ids2str,\
checksum, json
from anki.lang import _
from anki.consts import *
from anki.hooks import runHook
import time
# Models
##########################################################################
# - careful not to add any lists/dicts/etc here, as they aren't deep copied
defaultModel = {
'sortf': 0,
'did': 1,
'latexPre': """\
\\documentclass[12pt]{article}
\\special{papersize=3in,5in}
\\usepackage[utf8]{inputenc}
\\usepackage{amssymb,amsmath}
\\pagestyle{empty}
\\setlength{\\parindent}{0in}
\\begin{document}
""",
'latexPost': "\\end{document}",
'mod': 0,
'usn': 0,
'vers': [], # FIXME: remove when other clients have caught up
'type': MODEL_STD,
'css': """\
.card {
font-family: arial;
font-size: 20px;
text-align: center;
color: black;
background-color: white;
}
"""
}
defaultField = {
'name': "",
'ord': None,
'sticky': False,
# the following alter editing, and are used as defaults for the
# template wizard
'rtl': False,
'font': "Arial",
'size': 20,
# reserved for future use
'media': [],
}
defaultTemplate = {
'name': "",
'ord': None,
'qfmt': "",
'afmt': "",
'did': None,
'bqfmt': "",
'bafmt': "",
# we don't define these so that we pick up system font size until set
#'bfont': "Arial",
#'bsize': 12,
}
class ModelManager(object):
# Saving/loading registry
#############################################################
def __init__(self, col):
self.col = col
def load(self, json_):
"Load registry from JSON."
self.changed = False
self.models = json.loads(json_)
def save(self, m=None, templates=False):
"Mark M modified if provided, and schedule registry flush."
if m and m['id']:
m['mod'] = intTime()
m['usn'] = self.col.usn()
self._updateRequired(m)
if templates:
self._syncTemplates(m)
self.changed = True
runHook("newModel")
def flush(self):
"Flush the registry if any models were changed."
if self.changed:
self.col.db.execute("update col set models = ?",
json.dumps(self.models))
self.changed = False
# Retrieving and creating models
#############################################################
def current(self, forDeck=True):
"Get current model."
m = self.get(self.col.decks.current().get('mid'))
if not forDeck or not m:
m = self.get(self.col.conf['curModel'])
return m or self.models.values()[0]
def setCurrent(self, m):
self.col.conf['curModel'] = m['id']
self.col.setMod()
def get(self, id):
"Get model with ID, or None."
id = str(id)
if id in self.models:
return self.models[id]
def all(self):
"Get all models."
return self.models.values()
def allNames(self):
return [m['name'] for m in self.all()]
def byName(self, name):
"Get model with NAME."
for m in self.models.values():
if m['name'] == name:
return m
def new(self, name):
"Create a new model, save it in the registry, and return it."
# caller should call save() after modifying
m = defaultModel.copy()
m['name'] = name
m['mod'] = intTime()
m['flds'] = []
m['tmpls'] = []
m['tags'] = []
m['id'] = None
return m
def rem(self, m):
"Delete model, and all its cards/notes."
self.col.modSchema(check=True)
current = self.current()['id'] == m['id']
# delete notes/cards
self.col.remCards(self.col.db.list("""
select id from cards where nid in (select id from notes where mid = ?)""",
m['id']))
# then the model
del self.models[str(m['id'])]
self.save()
# GUI should ensure last model is not deleted
if current:
self.setCurrent(self.models.values()[0])
def add(self, m):
self._setID(m)
self.update(m)
self.setCurrent(m)
self.save(m)
def ensureNameUnique(self, m):
for mcur in self.all():
if (mcur['name'] == m['name'] and
mcur['id'] != m['id']):
m['name'] += "-" + checksum(str(time.time()))[:5]
break
def update(self, m):
"Add or update an existing model. Used for syncing and merging."
self.ensureNameUnique(m)
self.models[str(m['id'])] = m
# mark registry changed, but don't bump mod time
self.save()
def _setID(self, m):
while 1:
id = str(intTime(1000))
if id not in self.models:
break
m['id'] = id
def have(self, id):
return str(id) in self.models
def ids(self):
return self.models.keys()
# Tools
##################################################
def nids(self, m):
"Note ids for M."
return self.col.db.list(
"select id from notes where mid = ?", m['id'])
def useCount(self, m):
"Number of note using M."
return self.col.db.scalar(
"select count() from notes where mid = ?", m['id'])
def tmplUseCount(self, m, ord):
return self.col.db.scalar("""
select count() from cards, notes where cards.nid = notes.id
and notes.mid = ? and cards.ord = ?""", m['id'], ord)
# Copying
##################################################
def copy(self, m):
"Copy, save and return."
m2 = copy.deepcopy(m)
m2['name'] = _("%s copy") % m2['name']
self.add(m2)
return m2
# Fields
##################################################
def newField(self, name):
f = defaultField.copy()
f['name'] = name
return f
def fieldMap(self, m):
"Mapping of field name -> (ord, field)."
return dict((f['name'], (f['ord'], f)) for f in m['flds'])
def fieldNames(self, m):
return [f['name'] for f in m['flds']]
def sortIdx(self, m):
return m['sortf']
def setSortIdx(self, m, idx):
assert idx >= 0 and idx < len(m['flds'])
self.col.modSchema(check=True)
m['sortf'] = idx
self.col.updateFieldCache(self.nids(m))
self.save(m)
def addField(self, m, field):
# only mod schema if model isn't new
if m['id']:
self.col.modSchema(check=True)
m['flds'].append(field)
self._updateFieldOrds(m)
self.save(m)
def add(fields):
fields.append("")
return fields
self._transformFields(m, add)
def remField(self, m, field):
self.col.modSchema(check=True)
# save old sort field
sortFldName = m['flds'][m['sortf']]['name']
idx = m['flds'].index(field)
m['flds'].remove(field)
# restore old sort field if possible, or revert to first field
m['sortf'] = 0
for c, f in enumerate(m['flds']):
if f['name'] == sortFldName:
m['sortf'] = c
break
self._updateFieldOrds(m)
def delete(fields):
del fields[idx]
return fields
self._transformFields(m, delete)
if m['flds'][m['sortf']]['name'] != sortFldName:
# need to rebuild sort field
self.col.updateFieldCache(self.nids(m))
# strip references from templates; this call also saves the model
self.renameField(m, field, None)
def moveField(self, m, field, idx):
self.col.modSchema(check=True)
oldidx = m['flds'].index(field)
if oldidx == idx:
return
# remember old sort field
sortf = m['flds'][m['sortf']]
# move
m['flds'].remove(field)
m['flds'].insert(idx, field)
# restore sort field
m['sortf'] = m['flds'].index(sortf)
self._updateFieldOrds(m)
self.save(m)
def move(fields, oldidx=oldidx):
val = fields[oldidx]
del fields[oldidx]
fields.insert(idx, val)
return fields
self._transformFields(m, move)
def renameField(self, m, field, newName):
self.col.modSchema(check=True)
pat = r'{{([^{}]*)([:#^/]|[^:#/^}][^:}]*?:|)%s}}'
def wrap(txt):
def repl(match):
return '{{' + match.group(1) + match.group(2) + txt + '}}'
return repl
for t in m['tmpls']:
for fmt in ('qfmt', 'afmt'):
if newName:
t[fmt] = re.sub(
pat % re.escape(field['name']), wrap(newName), t[fmt])
else:
t[fmt] = re.sub(
pat % re.escape(field['name']), "", t[fmt])
field['name'] = newName
self.save(m)
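# Example of the rewrite above: renaming field "Front" to "Q" turns
# "{{Front}}" into "{{Q}}" and "{{cloze:Front}}" into "{{cloze:Q}}" in every
# qfmt/afmt; calling renameField(m, field, None) strips the references
# entirely (as remField does).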
def _updateFieldOrds(self, m):
for c, f in enumerate(m['flds']):
f['ord'] = c
def _transformFields(self, m, fn):
# model hasn't been added yet?
if not m['id']:
return
r = []
for (id, flds) in self.col.db.execute(
"select id, flds from notes where mid = ?", m['id']):
r.append((joinFields(fn(splitFields(flds))),
intTime(), self.col.usn(), id))
self.col.db.executemany(
"update notes set flds=?,mod=?,usn=? where id = ?", r)
# Templates
##################################################
def newTemplate(self, name):
t = defaultTemplate.copy()
t['name'] = name
return t
def addTemplate(self, m, template):
"Note: should col.genCards() afterwards."
if m['id']:
self.col.modSchema(check=True)
m['tmpls'].append(template)
self._updateTemplOrds(m)
self.save(m)
def remTemplate(self, m, template):
"False if removing template would leave orphan notes."
assert len(m['tmpls']) > 1
# find cards using this template
ord = m['tmpls'].index(template)
cids = self.col.db.list("""
select c.id from cards c, notes f where c.nid=f.id and mid = ? and ord = ?""",
m['id'], ord)
# all notes with this template must have at least two cards, or we
# could end up creating orphaned notes
if self.col.db.scalar("""
select nid, count() from cards where
nid in (select nid from cards where id in %s)
group by nid
having count() < 2
limit 1""" % ids2str(cids)):
return False
# ok to proceed; remove cards
self.col.modSchema(check=True)
self.col.remCards(cids)
# shift ordinals
self.col.db.execute("""
update cards set ord = ord - 1, usn = ?, mod = ?
where nid in (select id from notes where mid = ?) and ord > ?""",
self.col.usn(), intTime(), m['id'], ord)
m['tmpls'].remove(template)
self._updateTemplOrds(m)
self.save(m)
return True
def _updateTemplOrds(self, m):
for c, t in enumerate(m['tmpls']):
t['ord'] = c
def moveTemplate(self, m, template, idx):
oldidx = m['tmpls'].index(template)
if oldidx == idx:
return
oldidxs = dict((id(t), t['ord']) for t in m['tmpls'])
m['tmpls'].remove(template)
m['tmpls'].insert(idx, template)
self._updateTemplOrds(m)
# generate change map
map = []
for t in m['tmpls']:
map.append("when ord = %d then %d" % (oldidxs[id(t)], t['ord']))
# apply
self.save(m)
self.col.db.execute("""
update cards set ord = (case %s end),usn=?,mod=? where nid in (
select id from notes where mid = ?)""" % " ".join(map),
self.col.usn(), intTime(), m['id'])
def _syncTemplates(self, m):
self.col.genCards(self.nids(m))
# Model changing
##########################################################################
# - maps are ord->ord, and there should not be duplicate targets
# - newModel should be self if model is not changing
def change(self, m, nids, newModel, fmap, cmap):
self.col.modSchema(check=True)
assert newModel['id'] == m['id'] or (fmap and cmap)
if fmap:
self._changeNotes(nids, newModel, fmap)
if cmap:
self._changeCards(nids, m, newModel, cmap)
self.col.genCards(nids)
def _changeNotes(self, nids, newModel, map):
d = []
nfields = len(newModel['flds'])
for (nid, flds) in self.col.db.execute(
"select id, flds from notes where id in "+ids2str(nids)):
newflds = {}
flds = splitFields(flds)
for old, new in map.items():
newflds[new] = flds[old]
flds = []
for c in range(nfields):
flds.append(newflds.get(c, ""))
flds = joinFields(flds)
d.append(dict(nid=nid, flds=flds, mid=newModel['id'],
m=intTime(),u=self.col.usn()))
self.col.db.executemany(
"update notes set flds=:flds,mid=:mid,mod=:m,usn=:u where id = :nid", d)
self.col.updateFieldCache(nids)
def _changeCards(self, nids, oldModel, newModel, map):
d = []
deleted = []
for (cid, ord) in self.col.db.execute(
"select id, ord from cards where nid in "+ids2str(nids)):
# if the src model is a cloze, we ignore the map, as the gui
# doesn't currently support mapping them
if oldModel['type'] == MODEL_CLOZE:
new = ord
if newModel['type'] != MODEL_CLOZE:
# if we're mapping to a regular note, we need to check if
# the destination ord is valid
if len(newModel['tmpls']) <= ord:
new = None
else:
# mapping from a regular note, so the map should be valid
new = map[ord]
if new is not None:
d.append(dict(
cid=cid,new=new,u=self.col.usn(),m=intTime()))
else:
deleted.append(cid)
self.col.db.executemany(
"update cards set ord=:new,usn=:u,mod=:m where id=:cid",
d)
self.col.remCards(deleted)
# Schema hash
##########################################################################
def scmhash(self, m):
"Return a hash of the schema, to see if models are compatible."
s = ""
for f in m['flds']:
s += f['name']
for t in m['tmpls']:
s += t['name']
return checksum(s)
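# Hedged sketch: two models are treated as import-compatible when their
# schema hashes match, i.e. identical field and template names in the same
# order (other field/template options may still differ):
#
#   if col.models.scmhash(m1) == col.models.scmhash(m2):
#       pass  # notes can be mapped across without rewriting guids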
# Required field/text cache
##########################################################################
def _updateRequired(self, m):
if m['type'] == MODEL_CLOZE:
# nothing to do
return
req = []
flds = [f['name'] for f in m['flds']]
for t in m['tmpls']:
ret = self._reqForTemplate(m, flds, t)
req.append((t['ord'], ret[0], ret[1]))
m['req'] = req
def _reqForTemplate(self, m, flds, t):
a = []
b = []
for f in flds:
a.append("ankiflag")
b.append("")
data = [1, 1, m['id'], 1, t['ord'], "", joinFields(a)]
full = self.col._renderQA(data)['q']
data = [1, 1, m['id'], 1, t['ord'], "", joinFields(b)]
empty = self.col._renderQA(data)['q']
# if full and empty are the same, the template is invalid and there is
# no way to satisfy it
if full == empty:
return "none", [], []
type = 'all'
req = []
for i in range(len(flds)):
tmp = a[:]
tmp[i] = ""
data[6] = joinFields(tmp)
# if no field content appeared, field is required
if "ankiflag" not in self.col._renderQA(data)['q']:
req.append(i)
if req:
return type, req
# if there are no required fields, switch to any mode
type = 'any'
req = []
for i in range(len(flds)):
tmp = b[:]
tmp[i] = "1"
data[6] = joinFields(tmp)
# if not the same as empty, this field can make the card non-blank
if self.col._renderQA(data)['q'] != empty:
req.append(i)
return type, req
def availOrds(self, m, flds):
"Given a joined field string, return available template ordinals."
if m['type'] == MODEL_CLOZE:
return self._availClozeOrds(m, flds)
fields = {}
for c, f in enumerate(splitFields(flds)):
fields[c] = f.strip()
avail = []
for ord, type, req in m['req']:
# unsatisfiable template
if type == "none":
continue
# AND requirement?
elif type == "all":
ok = True
for idx in req:
if not fields[idx]:
# missing and was required
ok = False
break
if not ok:
continue
# OR requirement?
elif type == "any":
ok = False
for idx in req:
if fields[idx]:
ok = True
break
if not ok:
continue
avail.append(ord)
return avail
def _availClozeOrds(self, m, flds, allowEmpty=True):
sflds = splitFields(flds)
map = self.fieldMap(m)
ords = set()
matches = re.findall("{{[^}]*?cloze:(?:[^}]?:)*(.+?)}}", m['tmpls'][0]['qfmt'])
matches += re.findall("<%cloze:(.+?)%>", m['tmpls'][0]['qfmt'])
for fname in matches:
if fname not in map:
continue
ord = map[fname][0]
ords.update([int(m)-1 for m in re.findall(
"{{c(\d+)::.+?}}", sflds[ord])])
if -1 in ords:
ords.remove(-1)
if not ords and allowEmpty:
# empty clozes use first ord
return [0]
return list(ords)
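# Worked example: with a first template containing "{{cloze:Text}}" and a
# note whose Text field is "{{c1::Paris}} is in {{c2::France}}", the c1/c2
# markers map to the zero-based card ordinals [0, 1].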
# Sync handling
##########################################################################
def beforeUpload(self):
for m in self.all():
m['usn'] = 0
self.save()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/models.py
|
models.py
|
import os
import re
import traceback
import urllib
import unicodedata
import sys
import zipfile
from cStringIO import StringIO
from anki.utils import checksum, isWin, isMac, json
from anki.db import DB
from anki.consts import *
from anki.latex import mungeQA
class MediaManager(object):
soundRegexps = ["(?i)(\[sound:(?P<fname>[^]]+)\])"]
imgRegexps = [
# src element quoted case
"(?i)(<img[^>]* src=(?P<str>[\"'])(?P<fname>[^>]+?)(?P=str)[^>]*>)",
# unquoted case
"(?i)(<img[^>]* src=(?!['\"])(?P<fname>[^ >]+)[^>]*?>)",
]
regexps = soundRegexps + imgRegexps
def __init__(self, col, server):
self.col = col
if server:
self._dir = None
return
# media directory
self._dir = re.sub("(?i)\.(anki2)$", ".media", self.col.path)
# convert dir to unicode if it's not already
if isinstance(self._dir, str):
self._dir = unicode(self._dir, sys.getfilesystemencoding())
if not os.path.exists(self._dir):
os.makedirs(self._dir)
try:
self._oldcwd = os.getcwd()
except OSError:
# cwd doesn't exist
self._oldcwd = None
try:
os.chdir(self._dir)
except OSError:
raise Exception("invalidTempFolder")
# change database
self.connect()
def connect(self):
if self.col.server:
return
path = self.dir()+".db2"
create = not os.path.exists(path)
os.chdir(self._dir)
self.db = DB(path)
if create:
self._initDB()
self.maybeUpgrade()
def _initDB(self):
self.db.executescript("""
create table media (
fname text not null primary key,
csum text, -- null indicates deleted file
mtime int not null, -- zero if deleted
dirty int not null
);
create index idx_media_dirty on media (dirty);
create table meta (dirMod int, lastUsn int); insert into meta values (0, 0);
""")
def maybeUpgrade(self):
oldpath = self.dir()+".db"
if os.path.exists(oldpath):
self.db.execute('attach "../collection.media.db" as old')
try:
self.db.execute("""
insert into media
select m.fname, csum, mod, ifnull((select 1 from log l2 where l2.fname=m.fname), 0) as dirty
from old.media m
left outer join old.log l using (fname)
union
select fname, null, 0, 1 from old.log where type=1;""")
self.db.execute("delete from meta")
self.db.execute("""
insert into meta select dirMod, usn from old.meta
""")
self.db.commit()
except Exception, e:
# if we couldn't import the old db for some reason, just start
# anew
self.col.log("failed to import old media db:"+traceback.format_exc())
self.db.execute("detach old")
npath = "../collection.media.db.old"
if os.path.exists(npath):
os.unlink(npath)
os.rename("../collection.media.db", npath)
def close(self):
if self.col.server:
return
self.db.close()
self.db = None
# change cwd back to old location
if self._oldcwd:
try:
os.chdir(self._oldcwd)
except:
# may have been deleted
pass
def dir(self):
return self._dir
def _isFAT32(self):
if not isWin:
return
import win32api, win32file
try:
name = win32file.GetVolumeNameForVolumeMountPoint(self._dir[:3])
except:
# mapped & unmapped network drive; pray that it's not vfat
return
if win32api.GetVolumeInformation(name)[4].lower().startswith("fat"):
return True
# Adding media
##########################################################################
# opath must be in unicode
def addFile(self, opath):
return self.writeData(opath, open(opath, "rb").read())
def writeData(self, opath, data):
# if fname is a full path, use only the basename
fname = os.path.basename(opath)
# make sure we write it in NFC form (on mac will autoconvert to NFD),
# and return an NFC-encoded reference
fname = unicodedata.normalize("NFC", fname)
# remove any dangerous characters
base = self.stripIllegal(fname)
(root, ext) = os.path.splitext(base)
def repl(match):
n = int(match.group(1))
return " (%d)" % (n+1)
# find the first available name
csum = checksum(data)
while True:
fname = root + ext
path = os.path.join(self.dir(), fname)
# if it doesn't exist, copy it directly
if not os.path.exists(path):
open(path, "wb").write(data)
return fname
# if it's identical, reuse
if checksum(open(path, "rb").read()) == csum:
return fname
# otherwise, increment the index in the filename
reg = " \((\d+)\)$"
if not re.search(reg, root):
root = root + " (1)"
else:
root = re.sub(reg, repl, root)
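# Behavior sketch: adding "dog.jpg" twice with identical bytes returns
# "dog.jpg" both times; adding different bytes under the same name yields
# "dog (1).jpg", then "dog (2).jpg", and so on.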
# String manipulation
##########################################################################
def filesInStr(self, mid, string, includeRemote=False):
l = []
model = self.col.models.get(mid)
strings = []
if model['type'] == MODEL_CLOZE and "{{c" in string:
# if the field has clozes in it, we'll need to expand the
# possibilities so we can render latex
strings = self._expandClozes(string)
else:
strings = [string]
for string in strings:
# handle latex
string = mungeQA(string, None, None, model, None, self.col)
# extract filenames
for reg in self.regexps:
for match in re.finditer(reg, string):
fname = match.group("fname")
isLocal = not re.match("(https?|ftp)://", fname.lower())
if isLocal or includeRemote:
l.append(fname)
return l
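# Hedged illustration of what the class-level regexps pull out of a field:
#
#   import re
#   html = '<img src="dog.jpg"> and [sound:bark.mp3]'
#   found = [m.group("fname")
#            for reg in MediaManager.regexps
#            for m in re.finditer(reg, html)]
#   # found == ['bark.mp3', 'dog.jpg']  (sound patterns are listed first)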
def _expandClozes(self, string):
ords = set(re.findall("{{c(\d+)::.+?}}", string))
strings = []
from anki.template.template import clozeReg
def qrepl(m):
if m.group(3):
return "[%s]" % m.group(3)
else:
return "[...]"
def arepl(m):
return m.group(1)
for ord in ords:
s = re.sub(clozeReg%ord, qrepl, string)
s = re.sub(clozeReg%".+?", "\\1", s)
strings.append(s)
strings.append(re.sub(clozeReg%".+?", arepl, string))
return strings
def transformNames(self, txt, func):
for reg in self.regexps:
txt = re.sub(reg, func, txt)
return txt
def strip(self, txt):
for reg in self.regexps:
txt = re.sub(reg, "", txt)
return txt
def escapeImages(self, string, unescape=False):
if unescape:
fn = urllib.unquote
else:
fn = urllib.quote
def repl(match):
tag = match.group(0)
fname = match.group("fname")
if re.match("(https?|ftp)://", fname):
return tag
return tag.replace(
fname, unicode(fn(fname.encode("utf-8")), "utf8"))
for reg in self.imgRegexps:
string = re.sub(reg, repl, string)
return string
# Rebuilding DB
##########################################################################
def check(self, local=None):
"Return (missingFiles, unusedFiles)."
mdir = self.dir()
# gather all media references in NFC form
allRefs = set()
for nid, mid, flds in self.col.db.execute("select id, mid, flds from notes"):
noteRefs = self.filesInStr(mid, flds)
# check the refs are in NFC
for f in noteRefs:
# if they're not, we'll need to fix them first
if f != unicodedata.normalize("NFC", f):
self._normalizeNoteRefs(nid)
noteRefs = self.filesInStr(mid, flds)
break
allRefs.update(noteRefs)
# loop through media folder
unused = []
invalid = []
if local is None:
files = os.listdir(mdir)
else:
files = local
renamedFiles = False
for file in files:
if not local:
if not os.path.isfile(file):
# ignore directories
continue
if file.startswith("_"):
# leading _ says to ignore file
continue
if not isinstance(file, unicode):
invalid.append(unicode(file, sys.getfilesystemencoding(), "replace"))
continue
nfcFile = unicodedata.normalize("NFC", file)
# we enforce NFC fs encoding on non-macs; on macs we'll have gotten
# NFD so we use the above variable for comparing references
if not isMac and not local:
if file != nfcFile:
# delete if we already have the NFC form, otherwise rename
if os.path.exists(nfcFile):
os.unlink(file)
renamedFiles = True
else:
os.rename(file, nfcFile)
renamedFiles = True
file = nfcFile
# compare
if nfcFile not in allRefs:
unused.append(file)
else:
allRefs.discard(nfcFile)
# if we renamed any files to nfc format, we must rerun the check
# to make sure the renamed files are not marked as unused
if renamedFiles:
return self.check(local=local)
nohave = [x for x in allRefs if not x.startswith("_")]
return (nohave, unused, invalid)
def _normalizeNoteRefs(self, nid):
note = self.col.getNote(nid)
for c, fld in enumerate(note.fields):
nfc = unicodedata.normalize("NFC", fld)
if nfc != fld:
note.fields[c] = nfc
note.flush()
# Copying on import
##########################################################################
def have(self, fname):
return os.path.exists(os.path.join(self.dir(), fname))
# Illegal characters
##########################################################################
_illegalCharReg = re.compile(r'[][><:"/?*^\\|\0\r\n]')
def stripIllegal(self, str):
return re.sub(self._illegalCharReg, "", str)
def hasIllegal(self, str):
# a file that couldn't be decoded to unicode is considered invalid
if not isinstance(str, unicode):
return True
return not not re.search(self._illegalCharReg, str)
# Tracking changes
##########################################################################
def findChanges(self):
"Scan the media folder if it's changed, and note any changes."
if self._changed():
self._logChanges()
def haveDirty(self):
return self.db.scalar("select 1 from media where dirty=1 limit 1")
def _mtime(self, path):
return int(os.stat(path).st_mtime)
def _checksum(self, path):
return checksum(open(path, "rb").read())
def _changed(self):
"Return dir mtime if it has changed since the last findChanges()"
# doesn't track edits, but user can add or remove a file to update
mod = self.db.scalar("select dirMod from meta")
mtime = self._mtime(self.dir())
if not self._isFAT32() and mod and mod == mtime:
return False
return mtime
def _logChanges(self):
(added, removed) = self._changes()
media = []
for f in added:
mt = self._mtime(f)
media.append((f, self._checksum(f), mt, 1))
for f in removed:
media.append((f, None, 0, 1))
# update media db
self.db.executemany("insert or replace into media values (?,?,?,?)",
media)
self.db.execute("update meta set dirMod = ?", self._mtime(self.dir()))
self.db.commit()
def _changes(self):
self.cache = {}
for (name, csum, mod) in self.db.execute(
"select fname, csum, mtime from media where csum is not null"):
self.cache[name] = [csum, mod, False]
added = []
removed = []
# loop through on-disk files
for f in os.listdir(self.dir()):
# ignore folders and thumbs.db
if os.path.isdir(f):
continue
if f.lower() == "thumbs.db":
continue
# and files with invalid chars
if self.hasIllegal(f):
continue
# empty files are invalid; clean them up and continue
sz = os.path.getsize(f)
if not sz:
os.unlink(f)
continue
if sz > 100*1024*1024:
self.col.log("ignoring file over 100MB", f)
continue
# check encoding
if not isMac:
normf = unicodedata.normalize("NFC", f)
if f != normf:
# wrong filename encoding which will cause sync errors
if os.path.exists(normf):
os.unlink(f)
else:
os.rename(f, normf)
# newly added?
if f not in self.cache:
added.append(f)
else:
# modified since last time?
if self._mtime(f) != self.cache[f][1]:
# and has different checksum?
if self._checksum(f) != self.cache[f][0]:
added.append(f)
# mark as used
self.cache[f][2] = True
# look for any entries in the cache that no longer exist on disk
for (k, v) in self.cache.items():
if not v[2]:
removed.append(k)
return added, removed
# Syncing-related
##########################################################################
def lastUsn(self):
return self.db.scalar("select lastUsn from meta")
def setLastUsn(self, usn):
self.db.execute("update meta set lastUsn = ?", usn)
self.db.commit()
def syncInfo(self, fname):
ret = self.db.first(
"select csum, dirty from media where fname=?", fname)
return ret or (None, 0)
def markClean(self, fnames):
for fname in fnames:
self.db.execute(
"update media set dirty=0 where fname=?", fname)
def syncDelete(self, fname):
if os.path.exists(fname):
os.unlink(fname)
self.db.execute("delete from media where fname=?", fname)
def mediaCount(self):
return self.db.scalar(
"select count() from media where csum is not null")
def dirtyCount(self):
return self.db.scalar(
"select count() from media where dirty=1")
def forceResync(self):
self.db.execute("delete from media")
self.db.execute("update meta set lastUsn=0,dirMod=0")
self.db.execute("vacuum analyze")
self.db.commit()
# Media syncing: zips
##########################################################################
def mediaChangesZip(self):
f = StringIO()
z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)
fnames = []
# meta is a list of (fname, zipname); an empty zipname marks a
# deleted file
meta = []
sz = 0
for c, (fname, csum) in enumerate(self.db.execute(
"select fname, csum from media where dirty=1"
" limit %d"%SYNC_ZIP_COUNT)):
fnames.append(fname)
normname = unicodedata.normalize("NFC", fname)
if csum:
self.col.log("+media zip", fname)
z.write(fname, str(c))
meta.append((normname, str(c)))
sz += os.path.getsize(fname)
else:
self.col.log("-media zip", fname)
meta.append((normname, ""))
if sz >= SYNC_ZIP_SIZE:
break
z.writestr("_meta", json.dumps(meta))
z.close()
return f.getvalue(), fnames
def addFilesFromZip(self, zipData):
"Extract zip data; true if finished."
f = StringIO(zipData)
z = zipfile.ZipFile(f, "r")
media = []
# get meta info first
meta = json.loads(z.read("_meta"))
# then loop through all files
cnt = 0
for i in z.infolist():
if i.filename == "_meta":
# ignore previously-retrieved meta
continue
else:
data = z.read(i)
csum = checksum(data)
name = meta[i.filename]
if not isinstance(name, unicode):
name = unicode(name, "utf8")
# normalize name for platform
if isMac:
name = unicodedata.normalize("NFD", name)
else:
name = unicodedata.normalize("NFC", name)
# save file
open(name, "wb").write(data)
# update db
media.append((name, csum, self._mtime(name), 0))
cnt += 1
if media:
self.db.executemany(
"insert or replace into media values (?,?,?,?)", media)
return cnt
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/media.py
|
media.py
|
from anki.template import Template
import os.path
import re
class View(object):
# Path where this view's template(s) live
template_path = '.'
# Extension for templates
template_extension = 'mustache'
# The name of this template. If none is given the View will try
# to infer it based on the class name.
template_name = None
# Absolute path to the template itself. Pystache will try to guess
# if it's not provided.
template_file = None
# Contents of the template.
template = None
# Character encoding of the template file. If None, Pystache will not
# do any decoding of the template.
template_encoding = None
def __init__(self, template=None, context=None, **kwargs):
self.template = template
self.context = context or {}
# If the context we're handed is a View, we want to inherit
# its settings.
if isinstance(context, View):
self.inherit_settings(context)
if kwargs:
self.context.update(kwargs)
def inherit_settings(self, view):
"""Given another View, copies its settings."""
if view.template_path:
self.template_path = view.template_path
if view.template_name:
self.template_name = view.template_name
def load_template(self):
if self.template:
return self.template
if self.template_file:
return self._load_template()
name = self.get_template_name() + '.' + self.template_extension
if isinstance(self.template_path, basestring):
self.template_file = os.path.join(self.template_path, name)
return self._load_template()
for path in self.template_path:
self.template_file = os.path.join(path, name)
if os.path.exists(self.template_file):
return self._load_template()
raise IOError('"%s" not found in "%s"' % (name, ':'.join(self.template_path),))
def _load_template(self):
f = open(self.template_file, 'r')
try:
template = f.read()
if self.template_encoding:
template = unicode(template, self.template_encoding)
finally:
f.close()
return template
def get_template_name(self, name=None):
"""TemplatePartial => template_partial
Takes a string but defaults to using the current class' name or
the `template_name` attribute
"""
if self.template_name:
return self.template_name
if not name:
name = self.__class__.__name__
def repl(match):
return '_' + match.group(0).lower()
return re.sub('[A-Z]', repl, name)[1:]
def __contains__(self, needle):
return needle in self.context or hasattr(self, needle)
def __getitem__(self, attr):
val = self.get(attr, None)
if not val:
raise KeyError("No such key.")
return val
def get(self, attr, default):
attr = self.context.get(attr, getattr(self, attr, default))
if hasattr(attr, '__call__'):
return attr()
else:
return attr
def render(self, encoding=None):
template = self.load_template()
return Template(template, self).render(encoding=encoding)
def __str__(self):
return self.render()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/template/view.py
|
view.py
|
========
Pystache
========
Inspired by ctemplate_ and et_, Mustache_ is a
framework-agnostic way to render logic-free views.
As ctemplate says, "It emphasizes separating logic from presentation:
it is impossible to embed application logic in this template language."
Pystache is a Python implementation of Mustache. Pystache requires
Python 2.6.
Documentation
=============
The different Mustache tags are documented at `mustache(5)`_.
Install It
==========
::
pip install pystache
Use It
======
::
>>> import pystache
>>> pystache.render('Hi {{person}}!', {'person': 'Mom'})
'Hi Mom!'
You can also create dedicated view classes to hold your view logic.
Here's your simple.py::
import pystache
class Simple(pystache.View):
def thing(self):
return "pizza"
Then your template, simple.mustache::
Hi {{thing}}!
Pull it together::
>>> Simple().render()
'Hi pizza!'
Test It
=======
nose_ works great! ::
pip install nose
cd pystache
nosetests
Author
======
::
context = { 'author': 'Chris Wanstrath', 'email': '[email protected]' }
pystache.render("{{author}} :: {{email}}", context)
.. _ctemplate: http://code.google.com/p/google-ctemplate/
.. _et: http://www.ivan.fomichev.name/2008/05/erlang-template-engine-prototype.html
.. _Mustache: http://defunkt.github.com/mustache/
.. _mustache(5): http://defunkt.github.com/mustache/mustache.5.html
.. _nose: http://somethingaboutorange.com/mrl/projects/nose/0.11.1/testing.html
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/template/README.rst
|
README.rst
|
import re
from anki.utils import stripHTML, stripHTMLMedia
from anki.hooks import runFilter
from anki.template import furigana; furigana.install()
from anki.template import hint; hint.install()
clozeReg = r"(?s)\{\{c%s::(.*?)(::(.*?))?\}\}"
modifiers = {}
def modifier(symbol):
"""Decorator for associating a function with a Mustache tag modifier.
@modifier('P')
def render_tongue(self, tag_name=None, context=None):
return ":P %s" % tag_name
{{P yo }} => :P yo
"""
def set_modifier(func):
modifiers[symbol] = func
return func
return set_modifier
def get_or_attr(obj, name, default=None):
try:
return obj[name]
except KeyError:
return default
except:
try:
return getattr(obj, name)
except AttributeError:
return default
class Template(object):
# The regular expression used to find a #section
section_re = None
# The regular expression used to find a tag.
tag_re = None
# Opening tag delimiter
otag = '{{'
# Closing tag delimiter
ctag = '}}'
def __init__(self, template, context=None):
self.template = template
self.context = context or {}
self.compile_regexps()
def render(self, template=None, context=None, encoding=None):
"""Turns a Mustache template into something wonderful."""
template = template or self.template
context = context or self.context
template = self.render_sections(template, context)
result = self.render_tags(template, context)
if encoding is not None:
result = result.encode(encoding)
return result
def compile_regexps(self):
"""Compiles our section and tag regular expressions."""
tags = { 'otag': re.escape(self.otag), 'ctag': re.escape(self.ctag) }
section = r"%(otag)s[\#|^]([^\}]*)%(ctag)s(.+?)%(otag)s/\1%(ctag)s"
self.section_re = re.compile(section % tags, re.M|re.S)
tag = r"%(otag)s(#|=|&|!|>|\{)?(.+?)\1?%(ctag)s+"
self.tag_re = re.compile(tag % tags)
def render_sections(self, template, context):
"""Expands sections."""
while 1:
match = self.section_re.search(template)
if match is None:
break
section, section_name, inner = match.group(0, 1, 2)
section_name = section_name.strip()
# check for cloze
m = re.match("c[qa]:(\d+):(.+)", section_name)
if m:
# get full field text
txt = get_or_attr(context, m.group(2), None)
m = re.search(clozeReg%m.group(1), txt)
if m:
it = m.group(1)
else:
it = None
else:
it = get_or_attr(context, section_name, None)
replacer = ''
# if it and isinstance(it, collections.Callable):
# replacer = it(inner)
if isinstance(it, basestring):
it = stripHTMLMedia(it).strip()
if it and not hasattr(it, '__iter__'):
if section[2] != '^':
replacer = inner
elif it and hasattr(it, 'keys') and hasattr(it, '__getitem__'):
if section[2] != '^':
replacer = self.render(inner, it)
elif it:
insides = []
for item in it:
insides.append(self.render(inner, item))
replacer = ''.join(insides)
elif not it and section[2] == '^':
replacer = inner
template = template.replace(section, replacer)
return template
def render_tags(self, template, context):
"""Renders all the tags in a template for a context."""
while 1:
match = self.tag_re.search(template)
if match is None:
break
tag, tag_type, tag_name = match.group(0, 1, 2)
tag_name = tag_name.strip()
try:
func = modifiers[tag_type]
replacement = func(self, tag_name, context)
template = template.replace(tag, replacement)
except (SyntaxError, KeyError):
return u"{{invalid template}}"
return template
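# Minimal usage sketch of the renderer:
#
#   Template(u"{{Front}} -> {{Back}}",
#            {'Front': u'dog', 'Back': u'inu'}).render()
#   # -> u'dog -> inu'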
# {{{ functions just like {{ in anki
@modifier('{')
def render_tag(self, tag_name, context):
return self.render_unescaped(tag_name, context)
@modifier('!')
def render_comment(self, tag_name=None, context=None):
"""Rendering a comment always returns nothing."""
return ''
@modifier(None)
def render_unescaped(self, tag_name=None, context=None):
"""Render a tag without escaping it."""
txt = get_or_attr(context, tag_name)
if txt is not None:
# some field names could have colons in them
# avoid interpreting these as field modifiers
# better would probably be to put some restrictions on field names
return txt
# field modifiers
parts = tag_name.split(':')
extra = None
if len(parts) == 1 or parts[0] == '':
return '{unknown field %s}' % tag_name
else:
mods, tag = parts[:-1], parts[-1] #py3k has *mods, tag = parts
txt = get_or_attr(context, tag)
# Since 'text:' and other mods can affect the HTML that Anki relies on to
# process clozes, clozes must always be treated after all other mods,
# regardless of how they're specified in the template, so that
# {{cloze:text: == {{text:cloze:
# For type:, we return directly, since no mod other than cloze (or other
# pre-defined mods) can be present, and those are treated separately
mods.reverse()
mods.sort(key=lambda s: not s=="type")
for mod in mods:
# built-in modifiers
if mod == 'text':
# strip html
txt = stripHTML(txt) if txt else ""
elif mod == 'type':
# type answer field; convert it to [[type:...]] for the gui code
# to process
return "[[%s]]" % tag_name
elif mod.startswith('cq-') or mod.startswith('ca-'):
# cloze deletion
mod, extra = mod.split("-")
txt = self.clozeText(txt, extra, mod[1]) if txt and extra else ""
else:
# hook-based field modifier
mod, extra = re.search("^(.*?)(?:\((.*)\))?$", mod).groups()
txt = runFilter('fmod_' + mod, txt or '', extra or '', context,
tag, tag_name);
if txt is None:
return '{unknown field %s}' % tag_name
return txt
def clozeText(self, txt, ord, type):
reg = clozeReg
if not re.search(reg%ord, txt):
return ""
def repl(m):
# replace chosen cloze with type
if type == "q":
if m.group(3):
return "<span class=cloze>[%s]</span>" % m.group(3)
else:
return "<span class=cloze>[...]</span>"
else:
return "<span class=cloze>%s</span>" % m.group(1)
txt = re.sub(reg%ord, repl, txt)
# and display other clozes normally
return re.sub(reg%"\d+", "\\1", txt)
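# Worked example (t is any Template instance; ord "1" targets {{c1::...}}):
#
#   t.clozeText(u"{{c1::Paris}} is in {{c2::France}}", "1", "q")
#   # -> u'<span class=cloze>[...]</span> is in France'
#   t.clozeText(u"{{c1::Paris}} is in {{c2::France}}", "1", "a")
#   # -> u'<span class=cloze>Paris</span> is in France'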
@modifier('=')
def render_delimiter(self, tag_name=None, context=None):
"""Changes the Mustache delimiter."""
try:
self.otag, self.ctag = tag_name.split(' ')
except ValueError:
# invalid
return
self.compile_regexps()
return ''
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/template/template.py
|
template.py
|
import time, re
from anki.db import DB
from anki.importing.noteimp import NoteImporter, ForeignNote, ForeignCard
from anki.stdmodels import addBasicModel, addClozeModel
from anki.lang import ngettext
class MnemosyneImporter(NoteImporter):
needMapper = False
update = False
allowHTML = True
def run(self):
db = DB(self.file)
ver = db.scalar(
"select value from global_variables where key='version'")
assert ver.startswith('Mnemosyne SQL 1') or ver == "2"
# gather facts into temp objects
curid = None
notes = {}
note = None
for _id, id, k, v in db.execute("""
select _id, id, key, value from facts f, data_for_fact d where
f._id=d._fact_id"""):
if id != curid:
if note:
notes[note['_id']] = note
note = {'_id': _id}
curid = id
note[k] = v
if note:
notes[note['_id']] = note
# gather cards
front = []
frontback = []
vocabulary = []
cloze = {}
for row in db.execute("""
select _fact_id, fact_view_id, tags, next_rep, last_rep, easiness,
acq_reps+ret_reps, lapses, card_type_id from cards"""):
# categorize note
note = notes[row[0]]
if row[1].endswith(".1"):
if row[1].startswith("1.") or row[1].startswith("1::"):
front.append(note)
elif row[1].startswith("2.") or row[1].startswith("2::"):
frontback.append(note)
elif row[1].startswith("3.") or row[1].startswith("3::"):
vocabulary.append(note)
elif row[1].startswith("5.1"):
cloze[row[0]] = note
# merge tags into note
tags = row[2].replace(", ", "\x1f").replace(" ", "_")
tags = tags.replace("\x1f", " ")
if "tags" not in note:
note['tags'] = []
note['tags'] += self.col.tags.split(tags)
note['tags'] = self.col.tags.canonify(note['tags'])
# if it's a new card we can go with the defaults
if row[3] == -1:
continue
# add the card
c = ForeignCard()
c.factor = int(row[5]*1000)
c.reps = row[6]
c.lapses = row[7]
# ivl is inferred in mnemosyne
next, prev = row[3:5]
c.ivl = max(1, (next - prev)/86400)
# work out how long we've got left
rem = int((next - time.time())/86400)
c.due = self.col.sched.today+rem
# get ord
m = re.search(".(\d+)$", row[1])
ord = int(m.group(1))-1
if 'cards' not in note:
note['cards'] = {}
note['cards'][ord] = c
self._addFronts(front)
total = self.total
self._addFrontBacks(frontback)
total += self.total
self._addVocabulary(vocabulary)
self.total += total
self._addCloze(cloze)
self.total += total
self.log.append(ngettext("%d note imported.", "%d notes imported.", self.total) % self.total)
def fields(self):
return self._fields
def _mungeField(self, fld):
# \n -> br
fld = re.sub("\r?\n", "<br>", fld)
# latex differences
fld = re.sub("(?i)<(/?(\$|\$\$|latex))>", "[\\1]", fld)
# audio differences
fld = re.sub("<audio src=\"(.+?)\">(</audio>)?", "[sound:\\1]", fld)
return fld
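# Example conversions performed above:
#   "line1\nline2"                 -> "line1<br>line2"
#   "<$>x^2</$>"                   -> "[$]x^2[/$]"
#   '<audio src="a.mp3"></audio>'  -> "[sound:a.mp3]"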
def _addFronts(self, notes, model=None, fields=("f", "b")):
data = []
for orig in notes:
# create a foreign note object
n = ForeignNote()
n.fields = []
for f in fields:
fld = self._mungeField(orig.get(f, ''))
n.fields.append(fld)
n.tags = orig['tags']
n.cards = orig.get('cards', {})
data.append(n)
# add a basic model
if not model:
model = addBasicModel(self.col)
model['name'] = "Mnemosyne-FrontOnly"
mm = self.col.models
mm.save(model)
mm.setCurrent(model)
self.model = model
self._fields = len(model['flds'])
self.initMapping()
# import
self.importNotes(data)
def _addFrontBacks(self, notes):
m = addBasicModel(self.col)
m['name'] = "Mnemosyne-FrontBack"
mm = self.col.models
t = mm.newTemplate("Back")
t['qfmt'] = "{{Back}}"
t['afmt'] = t['qfmt'] + "\n\n<hr id=answer>\n\n{{Front}}"
mm.addTemplate(m, t)
self._addFronts(notes, m)
def _addVocabulary(self, notes):
mm = self.col.models
m = mm.new("Mnemosyne-Vocabulary")
for f in "Expression", "Pronunciation", "Meaning", "Notes":
fm = mm.newField(f)
mm.addField(m, fm)
t = mm.newTemplate("Recognition")
t['qfmt'] = "{{Expression}}"
t['afmt'] = t['qfmt'] + """\n\n<hr id=answer>\n\n\
{{Pronunciation}}<br>\n{{Meaning}}<br>\n{{Notes}}"""
mm.addTemplate(m, t)
t = mm.newTemplate("Production")
t['qfmt'] = "{{Meaning}}"
t['afmt'] = t['qfmt'] + """\n\n<hr id=answer>\n\n\
{{Expression}}<br>\n{{Pronunciation}}<br>\n{{Notes}}"""
mm.addTemplate(m, t)
mm.add(m)
self._addFronts(notes, m, fields=("f", "p_1", "m_1", "n"))
def _addCloze(self, notes):
data = []
notes = notes.values()
for orig in notes:
# create a foreign note object
n = ForeignNote()
n.fields = []
fld = orig.get("text", "")
fld = re.sub("\r?\n", "<br>", fld)
state = dict(n=1)
def repl(match):
# replace [...] with cloze refs
res = ("{{c%d::%s}}" % (state['n'], match.group(1)))
state['n'] += 1
return res
fld = re.sub("\[(.+?)\]", repl, fld)
fld = self._mungeField(fld)
n.fields.append(fld)
n.fields.append("") # extra
n.tags = orig['tags']
n.cards = orig.get('cards', {})
data.append(n)
# add cloze model
model = addClozeModel(self.col)
model['name'] = "Mnemosyne-Cloze"
mm = self.col.models
mm.save(model)
mm.setCurrent(model)
self.model = model
self._fields = len(model['flds'])
self.initMapping()
self.importNotes(data)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/importing/mnemo.py
|
mnemo.py
|
import os
import unicodedata
from anki import Collection
from anki.utils import intTime, splitFields, joinFields, incGuid
from anki.importing.base import Importer
from anki.lang import _
from anki.lang import ngettext
GUID = 1
MID = 2
MOD = 3
class Anki2Importer(Importer):
needMapper = False
deckPrefix = None
allowUpdate = True
dupeOnSchemaChange = False
def run(self, media=None):
self._prepareFiles()
if media is not None:
# Anki1 importer has provided us with a custom media folder
self.src.media._dir = media
try:
self._import()
finally:
self.src.close(save=False)
def _prepareFiles(self):
self.dst = self.col
self.src = Collection(self.file)
def _import(self):
self._decks = {}
if self.deckPrefix:
id = self.dst.decks.id(self.deckPrefix)
self.dst.decks.select(id)
self._prepareTS()
self._prepareModels()
self._importNotes()
self._importCards()
self._importStaticMedia()
self._postImport()
self.dst.db.execute("vacuum")
self.dst.db.execute("analyze")
# Notes
######################################################################
def _importNotes(self):
# build guid -> (id,mod,mid) hash & map of existing note ids
self._notes = {}
existing = {}
for id, guid, mod, mid in self.dst.db.execute(
"select id, guid, mod, mid from notes"):
self._notes[guid] = (id, mod, mid)
existing[id] = True
# we may need to rewrite the guid if the model schemas don't match,
# so we need to keep track of the changes for the card import stage
self._changedGuids = {}
# apart from upgrading from anki1 decks, we ignore updates to changed
# schemas. we need to note the ignored guids, so we avoid importing
# invalid cards
self._ignoredGuids = {}
# iterate over source collection
add = []
update = []
dirty = []
usn = self.dst.usn()
dupes = 0
dupesIgnored = []
for note in self.src.db.execute(
"select * from notes"):
# turn the db result into a mutable list
note = list(note)
shouldAdd = self._uniquifyNote(note)
if shouldAdd:
# ensure id is unique
while note[0] in existing:
note[0] += 999
existing[note[0]] = True
# bump usn
note[4] = usn
# update media references in case of dupes
note[6] = self._mungeMedia(note[MID], note[6])
add.append(note)
dirty.append(note[0])
# note that we have added the guid
self._notes[note[GUID]] = (note[0], note[3], note[MID])
else:
# a duplicate or changed schema - safe to update?
dupes += 1
if self.allowUpdate:
oldNid, oldMod, oldMid = self._notes[note[GUID]]
# will update if incoming note more recent
if oldMod < note[MOD]:
# safe if note types identical
if oldMid == note[MID]:
# incoming note should use existing id
note[0] = oldNid
note[4] = usn
note[6] = self._mungeMedia(note[MID], note[6])
update.append(note)
dirty.append(note[0])
else:
dupesIgnored.append("%s: %s" % (
self.col.models.get(oldMid)['name'],
note[6].replace("\x1f", ",")
))
self._ignoredGuids[note[GUID]] = True
if dupes:
self.log.append(_("Updated %(a)d of %(b)d existing notes.") % dict(
a=len(update), b=dupes))
if dupesIgnored:
self.log.append(_("Some updates were ignored because note type has changed:"))
self.log.extend(dupesIgnored)
# export info for calling code
self.dupes = dupes
self.added = len(add)
self.updated = len(update)
# add to col
self.dst.db.executemany(
"insert or replace into notes values (?,?,?,?,?,?,?,?,?,?,?)",
add)
self.dst.db.executemany(
"insert or replace into notes values (?,?,?,?,?,?,?,?,?,?,?)",
update)
self.dst.updateFieldCache(dirty)
self.dst.tags.registerNotes(dirty)
# determine if note is a duplicate, and adjust mid and/or guid as required
# returns true if note should be added
def _uniquifyNote(self, note):
origGuid = note[GUID]
srcMid = note[MID]
dstMid = self._mid(srcMid)
# duplicate schemas?
if srcMid == dstMid:
return origGuid not in self._notes
# differing schemas and note doesn't exist?
note[MID] = dstMid
if origGuid not in self._notes:
return True
# as the schemas differ and we already have a note with a different
# note type, this note needs a new guid
if not self.dupeOnSchemaChange:
return False
while True:
note[GUID] = incGuid(note[GUID])
self._changedGuids[origGuid] = note[GUID]
# if we don't have an existing guid, we can add
if note[GUID] not in self._notes:
return True
# if the existing guid shares the same mid, we can reuse
if dstMid == self._notes[note[GUID]][MID]:
return False
# Models
######################################################################
# Models in the two decks may share an ID but not a schema, so we need to
# compare the field & template signature rather than just rely on ID. If
# the schemas don't match, we increment the mid and try again, creating a
# new model if necessary.
def _prepareModels(self):
"Prepare index of schema hashes."
self._modelMap = {}
def _mid(self, srcMid):
"Return local id for remote MID."
# already processed this mid?
if srcMid in self._modelMap:
return self._modelMap[srcMid]
mid = srcMid
srcModel = self.src.models.get(srcMid)
srcScm = self.src.models.scmhash(srcModel)
while True:
# missing from target col?
if not self.dst.models.have(mid):
# copy it over
model = srcModel.copy()
model['id'] = mid
model['mod'] = intTime()
model['usn'] = self.col.usn()
self.dst.models.update(model)
break
# there's an existing model; do the schemas match?
dstModel = self.dst.models.get(mid)
dstScm = self.dst.models.scmhash(dstModel)
if srcScm == dstScm:
# they do; we can reuse this mid
model = srcModel.copy()
model['id'] = mid
model['mod'] = intTime()
model['usn'] = self.col.usn()
self.dst.models.update(model)
break
# as they don't match, try next id
mid += 1
# save map and return new mid
self._modelMap[srcMid] = mid
return mid
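# Behavior sketch: if the target collection already holds a model under this
# id with a different scmhash, ids are probed upward (mid+1, mid+2, ...)
# until a free or schema-identical slot is found; the result is cached in
# self._modelMap so each source mid is resolved only once.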
# Decks
######################################################################
def _did(self, did):
"Given did in src col, return local id."
# already converted?
if did in self._decks:
return self._decks[did]
# get the name in src
g = self.src.decks.get(did)
name = g['name']
# if there's a prefix, replace the top level deck
if self.deckPrefix:
tmpname = "::".join(name.split("::")[1:])
name = self.deckPrefix
if tmpname:
name += "::" + tmpname
# manually create any parents so we can pull in descriptions
head = ""
for parent in name.split("::")[:-1]:
if head:
head += "::"
head += parent
idInSrc = self.src.decks.id(head)
self._did(idInSrc)
# create in local
newid = self.dst.decks.id(name)
# pull conf over
if 'conf' in g and g['conf'] != 1:
conf = self.src.decks.getConf(g['conf'])
self.dst.decks.save(conf)
self.dst.decks.updateConf(conf)
g2 = self.dst.decks.get(newid)
g2['conf'] = g['conf']
self.dst.decks.save(g2)
# save desc
deck = self.dst.decks.get(newid)
deck['desc'] = g['desc']
self.dst.decks.save(deck)
# add to deck map and return
self._decks[did] = newid
return newid
# Cards
######################################################################
def _importCards(self):
# build map of (guid, ord) -> cid and used id cache
self._cards = {}
existing = {}
for guid, ord, cid in self.dst.db.execute(
"select f.guid, c.ord, c.id from cards c, notes f "
"where c.nid = f.id"):
existing[cid] = True
self._cards[(guid, ord)] = cid
# loop through src
cards = []
revlog = []
cnt = 0
usn = self.dst.usn()
aheadBy = self.src.sched.today - self.dst.sched.today
for card in self.src.db.execute(
"select f.guid, f.mid, c.* from cards c, notes f "
"where c.nid = f.id"):
guid = card[0]
if guid in self._changedGuids:
guid = self._changedGuids[guid]
if guid in self._ignoredGuids:
continue
# does the card's note exist in dst col?
if guid not in self._notes:
continue
dnid = self._notes[guid]
# does the card already exist in the dst col?
ord = card[5]
if (guid, ord) in self._cards:
# fixme: in future, could update if newer mod time
continue
# doesn't exist. strip off note info, and save src id for later
card = list(card[2:])
scid = card[0]
# ensure the card id is unique
while card[0] in existing:
card[0] += 999
existing[card[0]] = True
# update cid, nid, etc
card[1] = self._notes[guid][0]
card[2] = self._did(card[2])
card[4] = intTime()
card[5] = usn
# review cards have a due date relative to collection
if card[7] in (2, 3) or card[6] == 2:
card[8] -= aheadBy
# if odid true, convert card from filtered to normal
if card[15]:
# odid
card[15] = 0
# odue
card[8] = card[14]
card[14] = 0
# queue
if card[6] == 1: # type
card[7] = 0
else:
card[7] = card[6]
# type
if card[6] == 1:
card[6] = 0
cards.append(card)
# we need to import revlog, rewriting card ids and bumping usn
for rev in self.src.db.execute(
"select * from revlog where cid = ?", scid):
rev = list(rev)
rev[1] = card[0]
rev[2] = self.dst.usn()
revlog.append(rev)
cnt += 1
# apply
self.dst.db.executemany("""
insert or ignore into cards values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)""", cards)
self.dst.db.executemany("""
insert or ignore into revlog values (?,?,?,?,?,?,?,?,?)""", revlog)
self.log.append(ngettext("%d card imported.", "%d cards imported.", cnt) % cnt)
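    # Example (hypothetical numbers): with src.sched.today = 103 and
    # dst.sched.today = 100, aheadBy = 3, so a review card due on src day 105
    # (two days away) is moved to day 102, i.e. still two days away in dst.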
# Media
######################################################################
# note: this func only applies to imports of .anki2. for .apkg files, the
# apkg importer does the copying
def _importStaticMedia(self):
# Import any '_foo' prefixed media files regardless of whether
# they're used on notes or not
dir = self.src.media.dir()
if not os.path.exists(dir):
return
for fname in os.listdir(dir):
if fname.startswith("_") and not self.dst.media.have(fname):
self._writeDstMedia(fname, self._srcMediaData(fname))
def _mediaData(self, fname, dir=None):
if not dir:
dir = self.src.media.dir()
path = os.path.join(dir, fname)
try:
return open(path, "rb").read()
except (IOError, OSError):
return
def _srcMediaData(self, fname):
"Data for FNAME in src collection."
return self._mediaData(fname, self.src.media.dir())
def _dstMediaData(self, fname):
"Data for FNAME in dst collection."
return self._mediaData(fname, self.dst.media.dir())
def _writeDstMedia(self, fname, data):
path = os.path.join(self.dst.media.dir(),
unicodedata.normalize("NFC", fname))
try:
open(path, "wb").write(data)
except (OSError, IOError):
# the user likely used subdirectories
pass
def _mungeMedia(self, mid, fields):
fields = splitFields(fields)
def repl(match):
fname = match.group("fname")
srcData = self._srcMediaData(fname)
dstData = self._dstMediaData(fname)
if not srcData:
# file was not in source, ignore
return match.group(0)
# if model-local file exists from a previous import, use that
name, ext = os.path.splitext(fname)
lname = "%s_%s%s" % (name, mid, ext)
if self.dst.media.have(lname):
return match.group(0).replace(fname, lname)
# if missing or the same, pass unmodified
elif not dstData or srcData == dstData:
# need to copy?
if not dstData:
self._writeDstMedia(fname, srcData)
return match.group(0)
# exists but does not match, so we need to dedupe
self._writeDstMedia(lname, srcData)
return match.group(0).replace(fname, lname)
for i in range(len(fields)):
fields[i] = self.dst.media.transformNames(fields[i], repl)
return joinFields(fields)
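    # Example (hypothetical): if "dog.jpg" exists in both collections with
    # different contents, references in the imported fields are rewritten to
    # "dog_<mid>.jpg" and that copy is written to the target media folder;
    # identical or missing files pass through unchanged.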
# Post-import cleanup
######################################################################
def _postImport(self):
for did in self._decks.values():
self.col.sched.maybeRandomizeDeck(did)
# make sure new position is correct
self.dst.conf['nextPos'] = self.dst.db.scalar(
"select max(due)+1 from cards where type = 0") or 0
self.dst.save()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/importing/anki2.py
|
anki2.py
|
import cgi
from anki.consts import NEW_CARDS_RANDOM
from anki.lang import _
from anki.utils import fieldChecksum, guid64, timestampID, \
joinFields, intTime, splitFields
from anki.importing.base import Importer
from anki.lang import ngettext
# Stores a list of fields, tags and deck
######################################################################
class ForeignNote(object):
"An temporary object storing fields and attributes."
def __init__(self):
self.fields = []
self.tags = []
self.deck = None
self.cards = {} # map of ord -> card
class ForeignCard(object):
def __init__(self):
self.due = 0
self.ivl = 1
self.factor = 2500
self.reps = 0
self.lapses = 0
# Base class for CSV and similar text-based imports
######################################################################
# The mapping is list of input fields, like:
# ['Expression', 'Reading', '_tags', None]
# - None means that the input should be discarded
# - _tags maps to note tags
# If the first field of the model is not in the map, the map is invalid.
# The import mode is one of:
# 0: update if first field matches existing note
# 1: ignore if first field matches existing note
# 2: import even if first field matches existing note
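# Example (hypothetical): for a tab-separated line "bonjour\thello\tgreetings",
# the mapping ['Front', 'Back', '_tags'] stores the first two columns in the
# note's fields and splits "greetings" into tags; a None entry would discard
# that column instead.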
class NoteImporter(Importer):
needMapper = True
needDelimiter = False
allowHTML = False
importMode = 0
def __init__(self, col, file):
Importer.__init__(self, col, file)
self.model = col.models.current()
self.mapping = None
self._deckMap = {}
def run(self):
"Import."
assert self.mapping
c = self.foreignNotes()
self.importNotes(c)
def fields(self):
"The number of fields."
return 0
def initMapping(self):
flds = [f['name'] for f in self.model['flds']]
# truncate to provided count
flds = flds[0:self.fields()]
# if there's room left, add tags
if self.fields() > len(flds):
flds.append("_tags")
# and if there's still room left, pad
flds = flds + [None] * (self.fields() - len(flds))
self.mapping = flds
def mappingOk(self):
return self.model['flds'][0]['name'] in self.mapping
def foreignNotes(self):
"Return a list of foreign notes for importing."
assert 0
def open(self):
"Open file and ensure it's in the right format."
return
def importNotes(self, notes):
"Convert each card into a note, apply attributes and add to col."
assert self.mappingOk()
# note whether tags are mapped
self._tagsMapped = False
for f in self.mapping:
if f == "_tags":
self._tagsMapped = True
# gather checks for duplicate comparison
csums = {}
for csum, id in self.col.db.execute(
"select csum, id from notes where mid = ?", self.model['id']):
if csum in csums:
csums[csum].append(id)
else:
csums[csum] = [id]
firsts = {}
fld0idx = self.mapping.index(self.model['flds'][0]['name'])
self._fmap = self.col.models.fieldMap(self.model)
self._nextID = timestampID(self.col.db, "notes")
# loop through the notes
updates = []
updateLog = []
updateLogTxt = _("First field matched: %s")
dupeLogTxt = _("Added duplicate with first field: %s")
new = []
self._ids = []
self._cards = []
self._emptyNotes = False
dupeCount = 0
dupes = []
for n in notes:
for c in range(len(n.fields)):
if not self.allowHTML:
n.fields[c] = cgi.escape(n.fields[c])
n.fields[c] = n.fields[c].strip()
if not self.allowHTML:
n.fields[c] = n.fields[c].replace("\n", "<br>")
fld0 = n.fields[fld0idx]
csum = fieldChecksum(fld0)
# first field must exist
if not fld0:
self.log.append(_("Empty first field: %s") %
" ".join(n.fields))
continue
# earlier in import?
if fld0 in firsts and self.importMode != 2:
# duplicates in source file; log and ignore
self.log.append(_("Appeared twice in file: %s") %
fld0)
continue
firsts[fld0] = True
# already exists?
found = False
if csum in csums:
# csum is not a guarantee; have to check
for id in csums[csum]:
flds = self.col.db.scalar(
"select flds from notes where id = ?", id)
sflds = splitFields(flds)
if fld0 == sflds[0]:
# duplicate
found = True
if self.importMode == 0:
data = self.updateData(n, id, sflds)
if data:
updates.append(data)
updateLog.append(updateLogTxt % fld0)
dupeCount += 1
found = True
elif self.importMode == 1:
dupeCount += 1
elif self.importMode == 2:
# allow duplicates in this case
if fld0 not in dupes:
# only show message once, no matter how many
# duplicates are in the collection already
updateLog.append(dupeLogTxt % fld0)
dupes.append(fld0)
found = False
# newly add
if not found:
data = self.newData(n)
if data:
new.append(data)
# note that we've seen this note once already
firsts[fld0] = True
self.addNew(new)
self.addUpdates(updates)
# make sure to update sflds, etc
self.col.updateFieldCache(self._ids)
# generate cards
if self.col.genCards(self._ids):
self.log.insert(0, _(
"Empty cards found. Please run Tools>Empty Cards."))
# apply scheduling updates
self.updateCards()
# we randomize or order here, to ensure that siblings
# have the same due#
did = self.col.decks.selected()
conf = self.col.decks.confForDid(did)
# in order due?
if conf['new']['order'] == NEW_CARDS_RANDOM:
self.col.sched.randomizeCards(did)
else:
self.col.sched.orderCards(did)
part1 = ngettext("%d note added", "%d notes added", len(new)) % len(new)
part2 = ngettext("%d note updated", "%d notes updated",
self.updateCount) % self.updateCount
if self.importMode == 0:
unchanged = dupeCount - self.updateCount
elif self.importMode == 1:
unchanged = dupeCount
else:
unchanged = 0
part3 = ngettext("%d note unchanged", "%d notes unchanged",
unchanged) % unchanged
self.log.append("%s, %s, %s." % (part1, part2, part3))
self.log.extend(updateLog)
if self._emptyNotes:
self.log.append(_("""\
One or more notes were not imported, because they didn't generate any cards. \
This can happen when you have empty fields or when you have not mapped the \
content in the text file to the correct fields."""))
self.total = len(self._ids)
def newData(self, n):
id = self._nextID
self._nextID += 1
self._ids.append(id)
if not self.processFields(n):
return
# note id for card updates later
for ord, c in n.cards.items():
self._cards.append((id, ord, c))
self.col.tags.register(n.tags)
return [id, guid64(), self.model['id'],
intTime(), self.col.usn(), self.col.tags.join(n.tags),
n.fieldsStr, "", "", 0, ""]
def addNew(self, rows):
self.col.db.executemany(
"insert or replace into notes values (?,?,?,?,?,?,?,?,?,?,?)",
rows)
def updateData(self, n, id, sflds):
self._ids.append(id)
if not self.processFields(n, sflds):
return
if self._tagsMapped:
self.col.tags.register(n.tags)
tags = self.col.tags.join(n.tags)
return [intTime(), self.col.usn(), n.fieldsStr, tags,
id, n.fieldsStr, tags]
else:
return [intTime(), self.col.usn(), n.fieldsStr,
id, n.fieldsStr]
def addUpdates(self, rows):
old = self.col.db.totalChanges()
if self._tagsMapped:
self.col.db.executemany("""
update notes set mod = ?, usn = ?, flds = ?, tags = ?
where id = ? and (flds != ? or tags != ?)""", rows)
else:
self.col.db.executemany("""
update notes set mod = ?, usn = ?, flds = ?
where id = ? and flds != ?""", rows)
self.updateCount = self.col.db.totalChanges() - old
def processFields(self, note, fields=None):
if not fields:
fields = [""]*len(self.model['flds'])
for c, f in enumerate(self.mapping):
if not f:
continue
elif f == "_tags":
note.tags.extend(self.col.tags.split(note.fields[c]))
else:
sidx = self._fmap[f][0]
fields[sidx] = note.fields[c]
note.fieldsStr = joinFields(fields)
ords = self.col.models.availOrds(self.model, note.fieldsStr)
if not ords:
self._emptyNotes = True
return ords
def updateCards(self):
data = []
for nid, ord, c in self._cards:
data.append((c.ivl, c.due, c.factor, c.reps, c.lapses, nid, ord))
# we assume any updated cards are reviews
self.col.db.executemany("""
update cards set type = 2, queue = 2, ivl = ?, due = ?,
factor = ?, reps = ?, lapses = ? where nid = ? and ord = ?""", data)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/importing/noteimp.py
|
noteimp.py
|
import sys
from anki.stdmodels import addBasicModel
from anki.importing.noteimp import NoteImporter, ForeignNote, ForeignCard
from anki.lang import _
from anki.lang import ngettext
from xml.dom import minidom
from types import DictType, InstanceType
from string import capwords
import re, unicodedata, time
class SmartDict(dict):
"""
See http://www.peterbe.com/plog/SmartDict
Copyright 2005, Peter Bengtsson, [email protected]
    A smart dict can be instantiated either from a pythonic dict
    or an instance object (e.g. SQL recordsets) but it ensures that you can
do all the convenient lookups such as x.first_name, x['first_name'] or
x.get('first_name').
"""
def __init__(self, *a, **kw):
if a:
if type(a[0]) is DictType:
kw.update(a[0])
elif type(a[0]) is InstanceType:
kw.update(a[0].__dict__)
elif hasattr(a[0], '__class__') and a[0].__class__.__name__=='SmartDict':
kw.update(a[0].__dict__)
dict.__init__(self, **kw)
self.__dict__ = self
class SuperMemoElement(SmartDict):
"SmartDict wrapper to store SM Element data"
def __init__(self, *a, **kw):
SmartDict.__init__(self, *a, **kw)
#default content
self.__dict__['lTitle'] = None
self.__dict__['Title'] = None
self.__dict__['Question'] = None
self.__dict__['Answer'] = None
self.__dict__['Count'] = None
self.__dict__['Type'] = None
self.__dict__['ID'] = None
self.__dict__['Interval'] = None
self.__dict__['Lapses'] = None
self.__dict__['Repetitions'] = None
        self.__dict__['LastRepetition'] = None
self.__dict__['AFactor'] = None
self.__dict__['UFactor'] = None
# This is an AnkiImporter
class SupermemoXmlImporter(NoteImporter):
needMapper = False
allowHTML = True
"""
    Parser for SuperMemo XML exports to Anki.
    Goes through an SM collection and fetches all elements.
    My SM collection was a big mess where topics and items were mixed.
    I was unable to parse my content in a regular way, like a for loop on
    minidom.getElementsByTagName() etc. My collection also had a
    limitation: topics were split into branches with at most 100 items
    each. Learning themes were in a deep structure. I wanted the full
    title of each element to be stored in tags.
    The code should be upgraded to support importing of SM2006 exports.
"""
def __init__(self, *args):
"""Initialize internal varables.
Pameters to be exposed to GUI are stored in self.META"""
NoteImporter.__init__(self, *args)
m = addBasicModel(self.col)
m['name'] = "Supermemo"
self.col.models.save(m)
self.initMapping()
self.lines = None
        self.numFields = 2
# SmXmlParse VARIABLES
self.xmldoc = None
self.pieces = []
self.cntBuf = [] #to store last parsed data
self.cntElm = [] #to store SM Elements data
self.cntCol = [] #to store SM Colections data
# store some meta info related to parse algorithm
# SmartDict works like dict / class wrapper
self.cntMeta = SmartDict()
self.cntMeta.popTitles = False
self.cntMeta.title = []
        # META stores controls of the import script; these should be
        # exposed in the import dialog. These are default values.
self.META = SmartDict()
self.META.resetLearningData = False # implemented
self.META.onlyMemorizedItems = False # implemented
        self.META.loggerLevel = 2 # implemented: 0=none, 1=info, 2=verbose, 3=debug
self.META.tagAllTopics = True
        self.META.pathsToBeTagged = ['English for begginers', 'Advanced English 97', 'Phrasal Verbs'] # path patterns to be tagged - in the GUI entered like 'Advanced English 97|My Vocabulary'
self.META.tagMemorizedItems = True # implemented
self.META.logToStdOutput = False # implemented
self.notes = []
## TOOLS
def _fudgeText(self, text):
"Replace sm syntax to Anki syntax"
text = text.replace("\n\r", u"<br>")
text = text.replace("\n", u"<br>")
return text
def _unicode2ascii(self,str):
"Remove diacritic punctuation from strings (titles)"
return u"".join([ c for c in unicodedata.normalize('NFKD', str) if not unicodedata.combining(c)])
def _decode_htmlescapes(self,s):
"""Unescape HTML code."""
        #In case of badly formatted HTML you can import MinimalSoup etc.; see BeautifulSoup source code
from BeautifulSoup import BeautifulStoneSoup as btflsoup
        #my SM2004 also escaped the & char in escaped sequences.
s = re.sub(u'&',u'&',s)
#unescaped solitary chars < or > that were ok for minidom confuse btfl soup
#s = re.sub(u'>',u'>',s)
#s = re.sub(u'<',u'<',s)
return unicode(btflsoup(s, selfClosingTags=['br','hr','img','wbr'], convertEntities=btflsoup.HTML_ENTITIES))
def _afactor2efactor(self, af):
# Adapted from <http://www.supermemo.com/beta/xml/xml-core.htm>
# Ranges for A-factors and E-factors
af_min = 1.2
af_max = 6.9
ef_min = 1.3
ef_max = 3.3
# Sanity checks for the A-factor
if af < af_min:
af = af_min
elif af > af_max:
af = af_max
# Scale af to the range 0..1
af_scaled = (af - af_min) / (af_max - af_min)
# Rescale to the interval ef_min..ef_max
ef = ef_min + af_scaled * (ef_max - ef_min)
return ef
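    # Worked example: af = 2.5 scales to (2.5 - 1.2) / 5.7 ≈ 0.228, giving
    # ef ≈ 1.3 + 0.228 * 2.0 ≈ 1.756; addItemToCards later stores
    # int(1.756 * 1000) = 1756 as the card factor.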
## DEFAULT IMPORTER METHODS
def foreignNotes(self):
# Load file and parse it by minidom
self.loadSource(self.file)
# Migrating content / time consuming part
# addItemToCards is called for each sm element
self.logger(u'Parsing started.')
self.parse()
self.logger(u'Parsing done.')
# Return imported cards
self.total = len(self.notes)
self.log.append(ngettext("%d card imported.", "%d cards imported.", self.total) % self.total)
return self.notes
def fields(self):
return 2
## PARSER METHODS
def addItemToCards(self,item):
"This method actually do conversion"
# new anki card
note = ForeignNote()
# clean Q and A
note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Question)))
note.fields.append(self._fudgeText(self._decode_htmlescapes(item.Answer)))
note.tags = []
# pre-process scheduling data
# convert learning data
if (not self.META.resetLearningData
and item.Interval >= 1
and getattr(item, "LastRepetition", None)):
# migration of LearningData algorithm
tLastrep = time.mktime(time.strptime(item.LastRepetition, '%d.%m.%Y'))
tToday = time.time()
card = ForeignCard()
card.ivl = int(item.Interval)
card.lapses = int(item.Lapses)
card.reps = int(item.Repetitions) + int(item.Lapses)
nextDue = tLastrep + (float(item.Interval) * 86400.0)
remDays = int((nextDue - time.time())/86400)
card.due = self.col.sched.today+remDays
card.factor = int(self._afactor2efactor(float(item.AFactor.replace(',','.')))*1000)
note.cards[0] = card
# categories & tags
        # it's worth having every theme (the tree structure of the SM collection) stored in tags, but sometimes not
        # you can decide whether to tag all topics or just those containing some pattern
tTaggTitle = False
for pattern in self.META.pathsToBeTagged:
if item.lTitle != None and pattern.lower() in u" ".join(item.lTitle).lower():
tTaggTitle = True
break
if tTaggTitle or self.META.tagAllTopics:
# normalize - remove diacritic punctuation from unicode chars to ascii
item.lTitle = [ self._unicode2ascii(topic) for topic in item.lTitle]
            # Transform xyz / aaa / bbb / ccc on the title path to tag xyzAaaBbbCcc
# clean things like [999] or [111-2222] from title path, example: xyz / [1000-1200] zyx / xyz
# clean whitespaces
# set Capital letters for first char of the word
tmp = list(set([ re.sub('(\[[0-9]+\])' , ' ' , i ).replace('_',' ') for i in item.lTitle ]))
tmp = list(set([ re.sub('(\W)',' ', i ) for i in tmp ]))
tmp = list(set([ re.sub( '^[0-9 ]+$','',i) for i in tmp ]))
tmp = list(set([ capwords(i).replace(' ','') for i in tmp ]))
            tags = [ j[0].lower() + j[1:] for j in tmp if j.strip() != '']
note.tags += tags
if self.META.tagMemorizedItems and item.Interval >0:
note.tags.append("Memorized")
        self.logger(u'Element tags\t- ' + repr(note.tags), level=3)
self.notes.append(note)
def logger(self,text,level=1):
"Wrapper for Anki logger"
dLevels={0:'',1:u'Info',2:u'Verbose',3:u'Debug'}
if level<=self.META.loggerLevel:
#self.deck.updateProgress(_(text))
if self.META.logToStdOutput:
print self.__class__.__name__+ u" - " + dLevels[level].ljust(9) +u' -\t'+ _(text)
# OPEN AND LOAD
def openAnything(self,source):
"Open any source / actually only openig of files is used"
if source == "-":
return sys.stdin
# try to open with urllib (if source is http, ftp, or file URL)
import urllib
try:
return urllib.urlopen(source)
except (IOError, OSError):
pass
# try to open with native open function (if source is pathname)
try:
return open(source)
except (IOError, OSError):
pass
# treat source as string
import StringIO
return StringIO.StringIO(str(source))
def loadSource(self, source):
"""Load source file and parse with xml.dom.minidom"""
self.source = source
self.logger(u'Load started...')
sock = open(self.source)
self.xmldoc = minidom.parse(sock).documentElement
sock.close()
self.logger(u'Load done.')
# PARSE
def parse(self, node=None):
"Parse method - parses document elements"
        if node is None and self.xmldoc is not None:
node = self.xmldoc
_method = "parse_%s" % node.__class__.__name__
if hasattr(self,_method):
parseMethod = getattr(self, _method)
parseMethod(node)
else:
self.logger(u'No handler for method %s' % _method, level=3)
def parse_Document(self, node):
"Parse XML document"
self.parse(node.documentElement)
def parse_Element(self, node):
"Parse XML element"
_method = "do_%s" % node.tagName
if hasattr(self,_method):
handlerMethod = getattr(self, _method)
handlerMethod(node)
else:
self.logger(u'No handler for method %s' % _method, level=3)
#print traceback.print_exc()
def parse_Text(self, node):
"Parse text inside elements. Text is stored into local buffer."
text = node.data
self.cntBuf.append(text)
#def parse_Comment(self, node):
# """
# Source can contain XML comments, but we ignore them
# """
# pass
# DO
def do_SuperMemoCollection(self, node):
"Process SM Collection"
for child in node.childNodes: self.parse(child)
def do_SuperMemoElement(self, node):
"Process SM Element (Type - Title,Topics)"
self.logger('='*45, level=3)
self.cntElm.append(SuperMemoElement())
self.cntElm[-1]['lTitle'] = self.cntMeta['title']
#parse all child elements
for child in node.childNodes: self.parse(child)
        #strip all saved strings, just to be sure
for key in self.cntElm[-1].keys():
if hasattr(self.cntElm[-1][key], 'strip'):
self.cntElm[-1][key]=self.cntElm[-1][key].strip()
#pop current element
smel = self.cntElm.pop()
        # Process cntElm if it is a valid Item (and not a Topic etc.)
# if smel.Lapses != None and smel.Interval != None and smel.Question != None and smel.Answer != None:
if smel.Title == None and smel.Question != None and smel.Answer != None:
if smel.Answer.strip() !='' and smel.Question.strip() !='':
                # migrate only memorized items, otherwise skip/continue
if self.META.onlyMemorizedItems and not(int(smel.Interval) > 0):
                    self.logger(u'Element skipped \t- not memorized ...', level=3)
else:
#import sm element data to Anki
self.addItemToCards(smel)
self.logger(u"Import element \t- " + smel['Question'], level=3)
#print element
self.logger('-'*45, level=3)
for key in smel.keys():
self.logger('\t%s %s' % ((key+':').ljust(15),smel[key]), level=3 )
else:
                self.logger(u'Element skipped \t- no valid Q and A ...', level=3)
else:
            # now we know that the item was a topic
            # parsing of the whole node is now finished
            # test if it's really a topic
if smel.Title != None:
# remove topic from title list
t = self.cntMeta['title'].pop()
self.logger(u'End of topic \t- %s' % (t), level=2)
def do_Content(self, node):
"Process SM element Content"
for child in node.childNodes:
if hasattr(child,'tagName') and child.firstChild != None:
self.cntElm[-1][child.tagName]=child.firstChild.data
def do_LearningData(self, node):
"Process SM element LearningData"
for child in node.childNodes:
if hasattr(child,'tagName') and child.firstChild != None:
self.cntElm[-1][child.tagName]=child.firstChild.data
# It's being processed in do_Content now
#def do_Question(self, node):
# for child in node.childNodes: self.parse(child)
# self.cntElm[-1][node.tagName]=self.cntBuf.pop()
# It's being processed in do_Content now
#def do_Answer(self, node):
# for child in node.childNodes: self.parse(child)
# self.cntElm[-1][node.tagName]=self.cntBuf.pop()
def do_Title(self, node):
"Process SM element Title"
t = self._decode_htmlescapes(node.firstChild.data)
self.cntElm[-1][node.tagName] = t
self.cntMeta['title'].append(t)
self.cntElm[-1]['lTitle'] = self.cntMeta['title']
self.logger(u'Start of topic \t- ' + u" / ".join(self.cntMeta['title']), level=2)
def do_Type(self, node):
"Process SM element Type"
if len(self.cntBuf) >=1 :
self.cntElm[-1][node.tagName]=self.cntBuf.pop()
if __name__ == '__main__':
# for testing you can start it standalone
#file = u'/home/epcim/hg2g/dev/python/sm2anki/ADVENG2EXP.xxe.esc.zaloha_FINAL.xml'
#file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_noOEM.xml'
#file = u'/home/epcim/hg2g/dev/python/anki/libanki/tests/importing/supermemo/original_ENGLISHFORBEGGINERS_oem_1250.xml'
file = str(sys.argv[1])
    impo = SupermemoXmlImporter(Deck(),file)  # note: Deck is not defined in this module
    impo.foreignNotes()
sys.exit(1)
# vim: ts=4 sts=2 ft=python
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/importing/supermemo_xml.py
|
supermemo_xml.py
|
import codecs
import csv
import re
from anki.importing.noteimp import NoteImporter, ForeignNote
from anki.lang import _
class TextImporter(NoteImporter):
needDelimiter = True
patterns = ("\t", ";")
def __init__(self, *args):
NoteImporter.__init__(self, *args)
self.lines = None
self.fileobj = None
self.delimiter = None
self.tagsToAdd = []
def foreignNotes(self):
self.open()
# process all lines
log = []
notes = []
lineNum = 0
ignored = 0
if self.delimiter:
reader = csv.reader(self.data, delimiter=self.delimiter, doublequote=True)
else:
reader = csv.reader(self.data, self.dialect, doublequote=True)
try:
for row in reader:
row = [unicode(x, "utf-8") for x in row]
if len(row) != self.numFields:
if row:
log.append(_(
"'%(row)s' had %(num1)d fields, "
"expected %(num2)d") % {
"row": u" ".join(row),
"num1": len(row),
"num2": self.numFields,
})
ignored += 1
continue
note = self.noteFromFields(row)
notes.append(note)
except (csv.Error), e:
log.append(_("Aborted: %s") % str(e))
self.log = log
self.ignored = ignored
self.fileobj.close()
return notes
def open(self):
"Parse the top line and determine the pattern and number of fields."
# load & look for the right pattern
self.cacheFile()
def cacheFile(self):
"Read file into self.lines if not already there."
if not self.fileobj:
self.openFile()
def openFile(self):
self.dialect = None
self.fileobj = open(self.file, "rbU")
self.data = self.fileobj.read()
if self.data.startswith(codecs.BOM_UTF8):
self.data = self.data[len(codecs.BOM_UTF8):]
def sub(s):
return re.sub("^\#.*$", "__comment", s)
self.data = [sub(x)+"\n" for x in self.data.split("\n") if sub(x) != "__comment"]
if self.data:
if self.data[0].startswith("tags:"):
tags = unicode(self.data[0][5:], "utf8").strip()
self.tagsToAdd = tags.split(" ")
del self.data[0]
self.updateDelimiter()
if not self.dialect and not self.delimiter:
raise Exception("unknownFormat")
def updateDelimiter(self):
def err():
raise Exception("unknownFormat")
self.dialect = None
sniffer = csv.Sniffer()
delims = [',', '\t', ';', ':']
if not self.delimiter:
try:
self.dialect = sniffer.sniff("\n".join(self.data[:10]),
delims)
except:
try:
self.dialect = sniffer.sniff(self.data[0], delims)
except:
pass
if self.dialect:
try:
reader = csv.reader(self.data, self.dialect, doublequote=True)
except:
err()
else:
if not self.delimiter:
if "\t" in self.data[0]:
self.delimiter = "\t"
elif ";" in self.data[0]:
self.delimiter = ";"
elif "," in self.data[0]:
self.delimiter = ","
else:
self.delimiter = " "
reader = csv.reader(self.data, delimiter=self.delimiter, doublequote=True)
try:
while True:
row = reader.next()
if row:
self.numFields = len(row)
break
except:
err()
self.initMapping()
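    # Note: when the csv sniffer fails, the fallback above picks the first of
    # tab, semicolon or comma found in the first line, else a space.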
def fields(self):
"Number of fields."
self.open()
return self.numFields
def noteFromFields(self, fields):
note = ForeignNote()
note.fields.extend([x for x in fields])
note.tags.extend(self.tagsToAdd)
return note
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/importing/csvfile.py
|
csvfile.py
|
import gzip, math, random, time, cgi
import xml.etree.ElementTree as ET
from anki.importing.noteimp import NoteImporter, ForeignNote, ForeignCard
from anki.stdmodels import addForwardReverse
ONE_DAY = 60*60*24
class PaukerImporter(NoteImporter):
'''Import Pauker 1.8 Lesson (*.pau.gz)'''
needMapper = False
allowHTML = True
def run(self):
model = addForwardReverse(self.col)
model['name'] = "Pauker"
self.col.models.save(model)
self.col.models.setCurrent(model)
self.model = model
self.initMapping()
NoteImporter.run(self)
def fields(self):
'''Pauker is Front/Back'''
return 2
def foreignNotes(self):
'''Build and return a list of notes.'''
notes = []
try:
f = gzip.open(self.file)
tree = ET.parse(f)
lesson = tree.getroot()
assert lesson.tag == "Lesson"
finally:
f.close()
index = -4
for batch in lesson.findall('./Batch'):
index += 1
for card in batch.findall('./Card'):
# Create a note for this card.
front = card.findtext('./FrontSide/Text')
back = card.findtext('./ReverseSide/Text')
note = ForeignNote()
note.fields = [cgi.escape(x.strip()).replace('\n','<br>').replace(' ',' ') for x in [front,back]]
notes.append(note)
# Determine due date for cards.
frontdue = card.find('./FrontSide[@LearnedTimestamp]')
backdue = card.find('./ReverseSide[@Batch][@LearnedTimestamp]')
if frontdue is not None:
note.cards[0] = self._learnedCard(index, int(frontdue.attrib['LearnedTimestamp']))
if backdue is not None:
note.cards[1] = self._learnedCard(int(backdue.attrib['Batch']), int(backdue.attrib['LearnedTimestamp']))
return notes
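    # The batch index drives the scheduling below: a card in batch 3 gets
    # ivl = e**3 ≈ 20.1 days; if it was learned 5 days ago, due lands about
    # 15 days from today, and ivl is then randomized to roughly 90-100% of
    # its computed value.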
def _learnedCard(self, batch, timestamp):
ivl = math.exp(batch)
now = time.time()
due = ivl - (now - timestamp/1000.0)/ONE_DAY
fc = ForeignCard()
fc.due = self.col.sched.today + int(due+0.5)
fc.ivl = random.randint(int(ivl*0.90), int(ivl+0.5))
fc.factor = random.randint(1500,2500)
return fc
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/anki/importing/pauker.py
|
pauker.py
|
import aqt
from aqt.qt import *
from aqt.utils import saveGeom, restoreGeom
class TagLimit(QDialog):
def __init__(self, mw, parent):
QDialog.__init__(self, parent, Qt.Window)
self.mw = mw
self.parent = parent
self.deck = self.parent.deck
self.dialog = aqt.forms.taglimit.Ui_Dialog()
self.dialog.setupUi(self)
self.rebuildTagList()
restoreGeom(self, "tagLimit")
self.exec_()
def rebuildTagList(self):
usertags = self.mw.col.tags.all()
yes = self.deck.get("activeTags", [])
no = self.deck.get("inactiveTags", [])
yesHash = {}
noHash = {}
for y in yes:
yesHash[y] = True
for n in no:
noHash[n] = True
groupedTags = []
usertags.sort()
icon = QIcon(":/icons/Anki_Fact.png")
groupedTags.append([icon, usertags])
self.tags = []
for (icon, tags) in groupedTags:
for t in tags:
self.tags.append(t)
item = QListWidgetItem(icon, t.replace("_", " "))
self.dialog.activeList.addItem(item)
if t in yesHash:
mode = QItemSelectionModel.Select
self.dialog.activeCheck.setChecked(True)
else:
mode = QItemSelectionModel.Deselect
idx = self.dialog.activeList.indexFromItem(item)
self.dialog.activeList.selectionModel().select(idx, mode)
# inactive
item = QListWidgetItem(icon, t.replace("_", " "))
self.dialog.inactiveList.addItem(item)
if t in noHash:
mode = QItemSelectionModel.Select
else:
mode = QItemSelectionModel.Deselect
idx = self.dialog.inactiveList.indexFromItem(item)
self.dialog.inactiveList.selectionModel().select(idx, mode)
def reject(self):
self.tags = ""
QDialog.reject(self)
def accept(self):
self.hide()
n = 0
# gather yes/no tags
yes = []
no = []
for c in range(self.dialog.activeList.count()):
# active
if self.dialog.activeCheck.isChecked():
item = self.dialog.activeList.item(c)
idx = self.dialog.activeList.indexFromItem(item)
if self.dialog.activeList.selectionModel().isSelected(idx):
yes.append(self.tags[c])
# inactive
item = self.dialog.inactiveList.item(c)
idx = self.dialog.inactiveList.indexFromItem(item)
if self.dialog.inactiveList.selectionModel().isSelected(idx):
no.append(self.tags[c])
# save in the deck for future invocations
self.deck['activeTags'] = yes
self.deck['inactiveTags'] = no
self.mw.col.decks.save(self.deck)
# build query string
self.tags = ""
if yes:
arr = []
for req in yes:
arr.append("tag:'%s'" % req)
self.tags += "(" + " or ".join(arr) + ")"
if no:
arr = []
for req in no:
arr.append("-tag:'%s'" % req)
self.tags += " " + " ".join(arr)
saveGeom(self, "tagLimit")
QDialog.accept(self)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/taglimit.py
|
taglimit.py
|
import os, cPickle, ctypes, shutil
from aqt.qt import *
from anki.utils import isMac, isWin
from anki import Collection
from anki.importing import Anki1Importer
from aqt.utils import showWarning
import aqt
class Upgrader(object):
def __init__(self, mw):
self.mw = mw
def maybeUpgrade(self):
p = self._oldConfigPath()
# does an old config file exist?
if not p or not os.path.exists(p):
return
# load old settings and copy over
try:
self._loadConf(p)
except:
showWarning(_("""\
Anki wasn't able to load your old config file. Please use File>Import \
to import your decks from previous Anki versions."""))
return
if not self._copySettings():
return
# and show the wizard
self._showWizard()
# Settings
######################################################################
def _oldConfigPath(self):
if isWin:
try:
os.environ['HOME'] = os.environ['APPDATA']
except:
# system with %APPDATA% not defined
return None
p = "~/.anki/config.db"
elif isMac:
p = "~/Library/Application Support/Anki/config.db"
else:
p = "~/.anki/config.db"
return os.path.expanduser(p)
def _loadConf(self, path):
self.conf = cPickle.load(open(path))
def _copySettings(self):
p = self.mw.pm.profile
for k in (
"recentColours", "stripHTML", "editFontFamily", "editFontSize",
"editLineSize", "deleteMedia", "preserveKeyboard", "numBackups",
"proxyHost", "proxyPass", "proxyPort", "proxyUser"):
try:
p[k] = self.conf[k]
except:
showWarning(_("""\
Anki 2.0 only supports automatic upgrading from Anki 1.2. To load old \
decks, please open them in Anki 1.2 to upgrade them, and then import them \
into Anki 2.0."""))
return
return True
# Wizard
######################################################################
def _showWizard(self):
if not self.conf['recentDeckPaths']:
# if there are no decks to upgrade, don't show wizard
return
class Wizard(QWizard):
def reject(self):
pass
self.wizard = w = Wizard()
w.addPage(self._welcomePage())
w.addPage(self._decksPage())
w.addPage(self._mediaPage())
w.addPage(self._readyPage())
w.addPage(self._upgradePage())
w.addPage(self._finishedPage())
w.setWindowTitle(_("Upgrade Wizard"))
w.setWizardStyle(QWizard.ModernStyle)
w.setOptions(QWizard.NoCancelButton)
w.exec_()
def _labelPage(self, title, txt):
p = QWizardPage()
p.setTitle(title)
l = QLabel(txt)
l.setTextFormat(Qt.RichText)
l.setTextInteractionFlags(Qt.TextSelectableByMouse)
l.setWordWrap(True)
v = QVBoxLayout()
v.addWidget(l)
p.setLayout(v)
return p
def _welcomePage(self):
return self._labelPage(_("Welcome"), _("""\
This wizard will guide you through the Anki 2.0 upgrade process.
For a smooth upgrade, please read the following pages carefully.
"""))
def _decksPage(self):
return self._labelPage(_("Your Decks"), _("""\
Anki 2 stores your decks in a new format. This wizard will automatically
convert your decks to that format. Your decks will be backed up before
the upgrade, so if you need to revert to the previous version of Anki, your
decks will still be usable."""))
def _mediaPage(self):
return self._labelPage(_("Sounds & Images"), _("""\
When your decks are upgraded, Anki will attempt to copy any sounds and images
from the old decks. If you were using a custom DropBox folder or custom media
folder, the upgrade process may not be able to locate your media. Later on, a
report of the upgrade will be presented to you. If you notice media was not
copied when it should have been, please see the upgrade guide for more
instructions.
<p>
AnkiWeb now supports media syncing directly. No special setup is required, and
media will be synchronized along with your cards when you sync to AnkiWeb."""))
def _readyPage(self):
class ReadyPage(QWizardPage):
def initializePage(self):
self.setTitle(_("Ready to Upgrade"))
self.setCommitPage(True)
l = QLabel(_("""\
When you're ready to upgrade, click the commit button to continue. The upgrade
guide will open in your browser while the upgrade proceeds. Please read it
carefully, as a lot has changed since the previous Anki version."""))
l.setTextFormat(Qt.RichText)
l.setTextInteractionFlags(Qt.TextSelectableByMouse)
l.setWordWrap(True)
v = QVBoxLayout()
v.addWidget(l)
self.setLayout(v)
return ReadyPage()
def _upgradePage(self):
decks = self.conf['recentDeckPaths']
colpath = self.mw.pm.collectionPath()
upgrader = self
class UpgradePage(QWizardPage):
def isComplete(self):
return False
def initializePage(self):
# can't use openLink; gui not ready for tooltips
QDesktopServices.openUrl(QUrl(aqt.appChanges))
self.setCommitPage(True)
self.setTitle(_("Upgrading"))
self.label = l = QLabel()
l.setTextInteractionFlags(Qt.TextSelectableByMouse)
l.setWordWrap(True)
v = QVBoxLayout()
v.addWidget(l)
prog = QProgressBar()
prog.setMaximum(0)
v.addWidget(prog)
l2 = QLabel(_("Please be patient; this can take a while."))
l2.setTextInteractionFlags(Qt.TextSelectableByMouse)
l2.setWordWrap(True)
v.addWidget(l2)
self.setLayout(v)
# run the upgrade in a different thread
self.thread = UpgradeThread(decks, colpath, upgrader.conf)
self.thread.start()
# and periodically update the GUI
self.timer = QTimer(self)
self.timer.connect(self.timer, SIGNAL("timeout()"), self.onTimer)
self.timer.start(1000)
self.onTimer()
def onTimer(self):
prog = self.thread.progress()
if not prog:
self.timer.stop()
upgrader.log = self.thread.log
upgrader.wizard.next()
self.label.setText(prog)
return UpgradePage()
def _finishedPage(self):
upgrader = self
class FinishedPage(QWizardPage):
def initializePage(self):
buf = ""
for file in upgrader.log:
buf += "<b>%s</b>" % file[0]
buf += "<ul><li>" + "<li>".join(file[1]) + "</ul><p>"
self.setTitle(_("Upgrade Complete"))
l = QLabel(_("""\
The upgrade has finished, and you're ready to start using Anki 2.0.
<p>
Below is a log of the update:
<p>
%s<br><br>""") % buf)
l.setTextFormat(Qt.RichText)
l.setTextInteractionFlags(Qt.TextSelectableByMouse)
l.setWordWrap(True)
l.setMaximumWidth(400)
a = QScrollArea()
a.setWidget(l)
v = QVBoxLayout()
v.addWidget(a)
self.setLayout(v)
return FinishedPage()
class UpgradeThread(QThread):
def __init__(self, paths, colpath, oldprefs):
QThread.__init__(self)
self.paths = paths
self.max = len(paths)
self.current = 1
self.finished = False
self.colpath = colpath
self.oldprefs = oldprefs
self.name = ""
self.log = []
def run(self):
# open profile deck
self.col = Collection(self.colpath)
# loop through paths
while True:
path = self.paths.pop()
self.name = os.path.basename(path)
self.upgrade(path)
# abort if finished
if not self.paths:
break
self.current += 1
self.col.close()
self.finished = True
def progress(self):
if self.finished:
return
return _("Upgrading deck %(a)s of %(b)s...\n%(c)s") % \
dict(a=self.current, b=self.max, c=self.name)
def upgrade(self, path):
log = self._upgrade(path)
self.log.append((self.name, log))
def _upgrade(self, path):
if not os.path.exists(path):
return [_("File was missing.")]
imp = Anki1Importer(self.col, path)
# try to copy over dropbox media first
try:
self.maybeCopyFromCustomFolder(path)
except Exception, e:
imp.log.append(repr(str(e)))
# then run the import
try:
imp.run()
except Exception, e:
if repr(str(e)) == "invalidFile":
# already logged
pass
else:
imp.log.append(repr(str(e)))
self.col.save()
return imp.log
def maybeCopyFromCustomFolder(self, path):
folder = os.path.basename(path).replace(".anki", ".media")
loc = self.oldprefs.get("mediaLocation")
if not loc:
# no prefix; user had media next to deck
return
elif loc == "dropbox":
# dropbox no longer exports the folder location; try default
if isWin:
dll = ctypes.windll.shell32
buf = ctypes.create_string_buffer(300)
dll.SHGetSpecialFolderPathA(None, buf, 0x0005, False)
loc = os.path.join(buf.value, 'Dropbox')
else:
loc = os.path.expanduser("~/Dropbox")
loc = os.path.join(loc, "Public", "Anki")
# no media folder in custom location?
mfolder = os.path.join(loc, folder)
if not os.path.exists(mfolder):
return
# folder exists; copy data next to the deck. leave a copy in the
# custom location so users can revert easily.
mdir = self.col.media.dir()
for f in os.listdir(mfolder):
src = os.path.join(mfolder, f)
dst = os.path.join(mdir, f)
if not os.path.exists(dst):
shutil.copyfile(src, dst)
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/upgrade.py
|
upgrade.py
|
from aqt.qt import *
from aqt.utils import askUser, getOnlyText, openLink, showWarning, shortcut, \
openHelp
from anki.utils import isMac, ids2str, fmtTimeSpan
import anki.js
from anki.errors import DeckRenameError
import aqt
from anki.sound import clearAudioQueue
class DeckBrowser(object):
def __init__(self, mw):
self.mw = mw
self.web = mw.web
self.bottom = aqt.toolbar.BottomBar(mw, mw.bottomWeb)
self.scrollPos = QPoint(0, 0)
def show(self):
clearAudioQueue()
self.web.setLinkHandler(self._linkHandler)
self.web.setKeyHandler(None)
self.mw.keyHandler = self._keyHandler
self._renderPage()
def refresh(self):
self._renderPage()
# Event handlers
##########################################################################
def _linkHandler(self, url):
if ":" in url:
(cmd, arg) = url.split(":")
else:
cmd = url
if cmd == "open":
self._selDeck(arg)
elif cmd == "opts":
self._showOptions(arg)
elif cmd == "shared":
self._onShared()
elif cmd == "import":
self.mw.onImport()
elif cmd == "lots":
openHelp("using-decks-appropriately")
elif cmd == "hidelots":
self.mw.pm.profile['hideDeckLotsMsg'] = True
self.refresh()
elif cmd == "create":
deck = getOnlyText(_("Name for deck:"))
if deck:
self.mw.col.decks.id(deck)
self.refresh()
elif cmd == "drag":
draggedDeckDid, ontoDeckDid = arg.split(',')
self._dragDeckOnto(draggedDeckDid, ontoDeckDid)
elif cmd == "collapse":
self._collapse(arg)
def _keyHandler(self, evt):
# currently does nothing
key = unicode(evt.text())
def _selDeck(self, did):
self.scrollPos = self.web.page().mainFrame().scrollPosition()
self.mw.col.decks.select(did)
self.mw.onOverview()
# HTML generation
##########################################################################
_dragIndicatorBorderWidth = "1px"
_css = """
a.deck { color: #000; text-decoration: none; min-width: 5em;
display:inline-block; }
a.deck:hover { text-decoration: underline; }
tr.deck td { border-bottom: %(width)s solid #e7e7e7; }
tr.top-level-drag-row td { border-bottom: %(width)s solid transparent; }
td { white-space: nowrap; }
tr.drag-hover td { border-bottom: %(width)s solid #aaa; }
body { margin: 1em; -webkit-user-select: none; }
.current { background-color: #e7e7e7; }
.decktd { min-width: 15em; }
.count { width: 6em; text-align: right; }
.collapse { color: #000; text-decoration:none; display:inline-block;
width: 1em; }
.filtered { color: #00a !important; }
""" % dict(width=_dragIndicatorBorderWidth)
_body = """
<center>
<table cellspacing=0 cellpading=3>
%(tree)s
</table>
<br>
%(stats)s
%(countwarn)s
</center>
<script>
$( init );
function init() {
$("tr.deck").draggable({
scroll: false,
// can't use "helper: 'clone'" because of a bug in jQuery 1.5
helper: function (event) {
return $(this).clone(false);
},
delay: 200,
opacity: 0.7
});
$("tr.deck").droppable({
drop: handleDropEvent,
hoverClass: 'drag-hover',
});
$("tr.top-level-drag-row").droppable({
drop: handleDropEvent,
hoverClass: 'drag-hover',
});
}
function handleDropEvent(event, ui) {
var draggedDeckId = ui.draggable.attr('id');
var ontoDeckId = $(this).attr('id');
py.link("drag:" + draggedDeckId + "," + ontoDeckId);
}
</script>
"""
def _renderPage(self, reuse=False):
css = self.mw.sharedCSS + self._css
if not reuse:
self._dueTree = self.mw.col.sched.deckDueTree()
tree = self._renderDeckTree(self._dueTree)
stats = self._renderStats()
op = self._oldPos()
self.web.stdHtml(self._body%dict(
tree=tree, stats=stats, countwarn=self._countWarn()), css=css,
js=anki.js.jquery+anki.js.ui, loadCB=lambda ok:\
self.web.page().mainFrame().setScrollPosition(op))
self.web.key = "deckBrowser"
self._drawButtons()
def _oldPos(self):
if self.web.key == "deckBrowser":
return self.web.page().mainFrame().scrollPosition()
else:
return self.scrollPos
def _renderStats(self):
cards, thetime = self.mw.col.db.first("""
select count(), sum(time)/1000 from revlog
where id > ?""", (self.mw.col.sched.dayCutoff-86400)*1000)
cards = cards or 0
thetime = thetime or 0
msgp1 = ngettext("<!--studied-->%d card", "<!--studied-->%d cards", cards) % cards
buf = _("Studied %(a)s in %(b)s today.") % dict(a=msgp1,
b=fmtTimeSpan(thetime, unit=1))
return buf
def _countWarn(self):
if (self.mw.col.decks.count() < 25 or
self.mw.pm.profile.get("hideDeckLotsMsg")):
return ""
return "<br><div style='width:50%;border: 1px solid #000;padding:5px;'>"+(
_("You have a lot of decks. Please see %(a)s. %(b)s") % dict(
a=("<a href=lots>%s</a>" % _("this page")),
b=("<br><small><a href=hidelots>(%s)</a></small>" % (_("hide"))+
"</div")))
def _renderDeckTree(self, nodes, depth=0):
if not nodes:
return ""
if depth == 0:
buf = """
<tr><th colspan=5 align=left>%s</th><th class=count>%s</th>
<th class=count>%s</th><th class=count></th></tr>""" % (
_("Deck"), _("Due"), _("New"))
buf += self._topLevelDragRow()
else:
buf = ""
for node in nodes:
buf += self._deckRow(node, depth, len(nodes))
if depth == 0:
buf += self._topLevelDragRow()
return buf
def _deckRow(self, node, depth, cnt):
name, did, due, lrn, new, children = node
deck = self.mw.col.decks.get(did)
if did == 1 and cnt > 1 and not children:
# if the default deck is empty, hide it
if not self.mw.col.db.scalar("select 1 from cards where did = 1"):
return ""
# parent toggled for collapsing
for parent in self.mw.col.decks.parents(did):
if parent['collapsed']:
buff = ""
return buff
prefix = "-"
if self.mw.col.decks.get(did)['collapsed']:
prefix = "+"
due += lrn
def indent():
return " "*6*depth
if did == self.mw.col.conf['curDeck']:
klass = 'deck current'
else:
klass = 'deck'
buf = "<tr class='%s' id='%d'>" % (klass, did)
# deck link
if children:
collapse = "<a class=collapse href='collapse:%d'>%s</a>" % (did, prefix)
else:
collapse = "<span class=collapse></span>"
if deck['dyn']:
extraclass = "filtered"
else:
extraclass = ""
buf += """
<td class=decktd colspan=5>%s%s<a class="deck %s" href='open:%d'>%s</a></td>"""% (
indent(), collapse, extraclass, did, name)
# due counts
def nonzeroColour(cnt, colour):
if not cnt:
colour = "#e0e0e0"
if cnt >= 1000:
cnt = "1000+"
return "<font color='%s'>%s</font>" % (colour, cnt)
buf += "<td align=right>%s</td><td align=right>%s</td>" % (
nonzeroColour(due, "#007700"),
nonzeroColour(new, "#000099"))
# options
buf += "<td align=right class=opts>%s</td></tr>" % self.mw.button(
link="opts:%d"%did, name="<img valign=bottom src='qrc:/icons/gears.png'>▼")
# children
buf += self._renderDeckTree(children, depth+1)
return buf
def _topLevelDragRow(self):
return "<tr class='top-level-drag-row'><td colspan='6'> </td></tr>"
def _dueImg(self, due, new):
if due:
i = "clock-icon"
elif new:
i = "plus-circle"
else:
i = "none"
return '<img valign=bottom src="qrc:/icons/%s.png">' % i
# Options
##########################################################################
def _showOptions(self, did):
m = QMenu(self.mw)
a = m.addAction(_("Rename"))
a.connect(a, SIGNAL("triggered()"), lambda did=did: self._rename(did))
a = m.addAction(_("Options"))
a.connect(a, SIGNAL("triggered()"), lambda did=did: self._options(did))
a = m.addAction(_("Export"))
a.connect(a, SIGNAL("triggered()"), lambda did=did: self._export(did))
a = m.addAction(_("Delete"))
a.connect(a, SIGNAL("triggered()"), lambda did=did: self._delete(did))
m.exec_(QCursor.pos())
def _export(self, did):
self.mw.onExport(did=did)
def _rename(self, did):
self.mw.checkpoint(_("Rename Deck"))
deck = self.mw.col.decks.get(did)
oldName = deck['name']
newName = getOnlyText(_("New deck name:"), default=oldName)
newName = newName.replace('"', "")
if not newName or newName == oldName:
return
try:
self.mw.col.decks.rename(deck, newName)
except DeckRenameError, e:
return showWarning(e.description)
self.show()
def _options(self, did):
# select the deck first, because the dyn deck conf assumes the deck
# we're editing is the current one
self.mw.col.decks.select(did)
self.mw.onDeckConf()
def _collapse(self, did):
self.mw.col.decks.collapse(did)
self._renderPage(reuse=True)
def _dragDeckOnto(self, draggedDeckDid, ontoDeckDid):
try:
self.mw.col.decks.renameForDragAndDrop(draggedDeckDid, ontoDeckDid)
except DeckRenameError, e:
return showWarning(e.description)
self.show()
def _delete(self, did):
if str(did) == '1':
return showWarning(_("The default deck can't be deleted."))
self.mw.checkpoint(_("Delete Deck"))
deck = self.mw.col.decks.get(did)
if not deck['dyn']:
dids = [did] + [r[1] for r in self.mw.col.decks.children(did)]
cnt = self.mw.col.db.scalar(
"select count() from cards where did in {0} or "
"odid in {0}".format(ids2str(dids)))
if cnt:
extra = ngettext(" It has %d card.", " It has %d cards.", cnt) % cnt
else:
extra = None
if deck['dyn'] or not extra or askUser(
(_("Are you sure you wish to delete %s?") % deck['name']) +
extra):
self.mw.progress.start(immediate=True)
self.mw.col.decks.rem(did, True)
self.mw.progress.finish()
self.show()
# Top buttons
######################################################################
def _drawButtons(self):
links = [
["", "shared", _("Get Shared")],
["", "create", _("Create Deck")],
["Ctrl+I", "import", _("Import File")],
]
buf = ""
for b in links:
if b[0]:
b[0] = _("Shortcut key: %s") % shortcut(b[0])
buf += """
<button title='%s' onclick='py.link(\"%s\");'>%s</button>""" % tuple(b)
self.bottom.draw(buf)
if isMac:
size = 28
else:
size = 36 + self.mw.fontHeightDelta*3
self.bottom.web.setFixedHeight(size)
self.bottom.web.setLinkHandler(self._linkHandler)
def _onShared(self):
openLink(aqt.appShared+"decks/")
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/deckbrowser.py
|
deckbrowser.py
|
from aqt.qt import *
import os, time
from aqt.utils import saveGeom, restoreGeom, maybeHideClose, showInfo, addCloseShortcut
import aqt
# Deck Stats
######################################################################
class DeckStats(QDialog):
def __init__(self, mw):
QDialog.__init__(self, mw, Qt.Window)
self.mw = mw
self.name = "deckStats"
self.period = 0
self.form = aqt.forms.stats.Ui_Dialog()
self.oldPos = None
self.wholeCollection = False
self.setMinimumWidth(700)
f = self.form
f.setupUi(self)
restoreGeom(self, self.name)
b = f.buttonBox.addButton(_("Save Image"),
QDialogButtonBox.ActionRole)
b.connect(b, SIGNAL("clicked()"), self.browser)
b.setAutoDefault(False)
c = self.connect
s = SIGNAL("clicked()")
c(f.groups, s, lambda: self.changeScope("deck"))
f.groups.setShortcut("g")
c(f.all, s, lambda: self.changeScope("collection"))
c(f.month, s, lambda: self.changePeriod(0))
c(f.year, s, lambda: self.changePeriod(1))
c(f.life, s, lambda: self.changePeriod(2))
c(f.web, SIGNAL("loadFinished(bool)"), self.loadFin)
maybeHideClose(self.form.buttonBox)
addCloseShortcut(self)
self.refresh()
self.exec_()
def reject(self):
saveGeom(self, self.name)
QDialog.reject(self)
def browser(self):
name = time.strftime("-%Y-%m-%d@%H-%M-%S.png",
time.localtime(time.time()))
name = "anki-"+_("stats")+name
desktopPath = QDesktopServices.storageLocation(QDesktopServices.DesktopLocation)
if not os.path.exists(desktopPath):
os.mkdir(desktopPath)
path = os.path.join(desktopPath, name)
p = self.form.web.page()
oldsize = p.viewportSize()
p.setViewportSize(p.mainFrame().contentsSize())
image = QImage(p.viewportSize(), QImage.Format_ARGB32)
painter = QPainter(image)
p.mainFrame().render(painter)
painter.end()
isOK = image.save(path, "png")
if isOK:
showInfo(_("An image was saved to your desktop."))
else:
showInfo(_("""\
Anki could not save the image. Please check that you have permission to write \
to your desktop."""))
p.setViewportSize(oldsize)
def changePeriod(self, n):
self.period = n
self.refresh()
def changeScope(self, type):
self.wholeCollection = type == "collection"
self.refresh()
def loadFin(self, b):
self.form.web.page().mainFrame().setScrollPosition(self.oldPos)
def refresh(self):
self.mw.progress.start(immediate=True)
self.oldPos = self.form.web.page().mainFrame().scrollPosition()
stats = self.mw.col.stats()
stats.wholeCollection = self.wholeCollection
self.report = stats.report(type=self.period)
self.form.web.setHtml(self.report)
self.mw.progress.finish()
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/stats.py
|
stats.py
|
import sys
from anki.hooks import runHook
from aqt.qt import *
from aqt.utils import openLink
from anki.utils import isMac, isWin
import anki.js
# Bridge for Qt<->JS
##########################################################################
class Bridge(QObject):
@pyqtSlot(str, result=str)
def run(self, str):
return unicode(self._bridge(unicode(str)))
@pyqtSlot(str)
def link(self, str):
self._linkHandler(unicode(str))
def setBridge(self, func):
self._bridge = func
def setLinkHandler(self, func):
self._linkHandler = func
# Page for debug messages
##########################################################################
class AnkiWebPage(QWebPage):
def __init__(self, jsErr):
QWebPage.__init__(self)
self._jsErr = jsErr
def javaScriptConsoleMessage(self, msg, line, srcID):
self._jsErr(msg, line, srcID)
# Main web view
##########################################################################
class AnkiWebView(QWebView):
def __init__(self, canFocus=True):
QWebView.__init__(self)
self.setRenderHints(
QPainter.TextAntialiasing |
QPainter.SmoothPixmapTransform |
QPainter.HighQualityAntialiasing)
self.setObjectName("mainText")
self._bridge = Bridge()
self._page = AnkiWebPage(self._jsErr)
self._loadFinishedCB = None
self.setPage(self._page)
self.page().setLinkDelegationPolicy(QWebPage.DelegateAllLinks)
self.setLinkHandler()
self.setKeyHandler()
self.connect(self, SIGNAL("linkClicked(QUrl)"), self._linkHandler)
self.connect(self, SIGNAL("loadFinished(bool)"), self._loadFinished)
self.allowDrops = False
# reset each time new html is set; used to detect if still in same state
self.key = None
self.setCanFocus(canFocus)
def keyPressEvent(self, evt):
if evt.matches(QKeySequence.Copy):
self.triggerPageAction(QWebPage.Copy)
evt.accept()
# work around a bug with windows qt where shift triggers buttons
if isWin and evt.modifiers() & Qt.ShiftModifier and not evt.text():
evt.accept()
return
QWebView.keyPressEvent(self, evt)
def keyReleaseEvent(self, evt):
if self._keyHandler:
if self._keyHandler(evt):
evt.accept()
return
QWebView.keyReleaseEvent(self, evt)
def contextMenuEvent(self, evt):
if not self._canFocus:
return
m = QMenu(self)
a = m.addAction(_("Copy"))
a.connect(a, SIGNAL("triggered()"),
lambda: self.triggerPageAction(QWebPage.Copy))
runHook("AnkiWebView.contextMenuEvent", self, m)
m.popup(QCursor.pos())
def dropEvent(self, evt):
pass
def setLinkHandler(self, handler=None):
if handler:
self.linkHandler = handler
else:
self.linkHandler = self._openLinksExternally
self._bridge.setLinkHandler(self.linkHandler)
def setKeyHandler(self, handler=None):
# handler should return true if event should be swallowed
self._keyHandler = handler
def setHtml(self, html, loadCB=None):
self.key = None
self._loadFinishedCB = loadCB
QWebView.setHtml(self, html)
def stdHtml(self, body, css="", bodyClass="", loadCB=None, js=None, head=""):
if isMac:
button = "font-weight: bold; height: 24px;"
else:
button = "font-weight: normal;"
self.setHtml("""
<!doctype html>
<html><head><style>
button {
%s
}
%s</style>
<script>%s</script>
%s
</head>
<body class="%s">%s</body></html>""" % (
button, css, js or anki.js.jquery+anki.js.browserSel,
head, bodyClass, body), loadCB)
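    # Example (hypothetical) usage; the callback receives the web view:
    #   web.stdHtml("<h1>Hi</h1>", css="h1 { color: red; }",
    #               loadCB=lambda view: view.eval("console.log('loaded')"))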
def setBridge(self, bridge):
self._bridge.setBridge(bridge)
def setCanFocus(self, canFocus=False):
self._canFocus = canFocus
if self._canFocus:
self.setFocusPolicy(Qt.WheelFocus)
else:
self.setFocusPolicy(Qt.NoFocus)
def eval(self, js):
self.page().mainFrame().evaluateJavaScript(js)
def _openLinksExternally(self, url):
openLink(url)
def _jsErr(self, msg, line, srcID):
sys.stdout.write(
(_("JS error on line %(a)d: %(b)s") %
dict(a=line, b=msg+"\n")).encode("utf8"))
def _linkHandler(self, url):
self.linkHandler(url.toString())
def _loadFinished(self):
self.page().mainFrame().addToJavaScriptWindowObject("py", self._bridge)
if self._loadFinishedCB:
self._loadFinishedCB(self)
self._loadFinishedCB = None
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/webview.py
|
webview.py
|
import os
import re
import traceback
import zipfile
import json
from aqt.qt import *
import anki.importing as importing
from aqt.utils import getOnlyText, getFile, showText, showWarning, openHelp,\
askUser, tooltip
from anki.hooks import addHook, remHook
import aqt.forms
import aqt.modelchooser
import aqt.deckchooser
class ChangeMap(QDialog):
def __init__(self, mw, model, current):
QDialog.__init__(self, mw, Qt.Window)
self.mw = mw
self.model = model
self.frm = aqt.forms.changemap.Ui_ChangeMap()
self.frm.setupUi(self)
n = 0
setCurrent = False
for field in self.model['flds']:
item = QListWidgetItem(_("Map to %s") % field['name'])
self.frm.fields.addItem(item)
if current == field['name']:
setCurrent = True
self.frm.fields.setCurrentRow(n)
n += 1
self.frm.fields.addItem(QListWidgetItem(_("Map to Tags")))
self.frm.fields.addItem(QListWidgetItem(_("Ignore field")))
if not setCurrent:
if current == "_tags":
self.frm.fields.setCurrentRow(n)
else:
self.frm.fields.setCurrentRow(n+1)
self.field = None
def getField(self):
self.exec_()
return self.field
def accept(self):
row = self.frm.fields.currentRow()
if row < len(self.model['flds']):
self.field = self.model['flds'][row]['name']
elif row == self.frm.fields.count() - 2:
self.field = "_tags"
else:
self.field = None
QDialog.accept(self)
def reject(self):
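        # closing or cancelling the dialog behaves like accepting the
        # current selection, so a mapping choice is always returned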
self.accept()
class ImportDialog(QDialog):
def __init__(self, mw, importer):
QDialog.__init__(self, mw, Qt.Window)
self.mw = mw
self.importer = importer
self.frm = aqt.forms.importing.Ui_ImportDialog()
self.frm.setupUi(self)
self.connect(self.frm.buttonBox.button(QDialogButtonBox.Help),
SIGNAL("clicked()"), self.helpRequested)
self.setupMappingFrame()
self.setupOptions()
self.modelChanged()
self.frm.autoDetect.setVisible(self.importer.needDelimiter)
addHook("currentModelChanged", self.modelChanged)
self.connect(self.frm.autoDetect, SIGNAL("clicked()"),
self.onDelimiter)
self.updateDelimiterButtonText()
self.frm.allowHTML.setChecked(self.mw.pm.profile.get('allowHTML', True))
self.frm.importMode.setCurrentIndex(self.mw.pm.profile.get('importMode', 1))
# import button
b = QPushButton(_("Import"))
self.frm.buttonBox.addButton(b, QDialogButtonBox.AcceptRole)
self.exec_()
def setupOptions(self):
self.model = self.mw.col.models.current()
self.modelChooser = aqt.modelchooser.ModelChooser(
self.mw, self.frm.modelArea, label=False)
self.deck = aqt.deckchooser.DeckChooser(
self.mw, self.frm.deckArea, label=False)
def modelChanged(self):
self.importer.model = self.mw.col.models.current()
self.importer.initMapping()
self.showMapping()
if self.mw.col.conf.get("addToCur", True):
did = self.mw.col.conf['curDeck']
if self.mw.col.decks.isDyn(did):
did = 1
else:
did = self.importer.model['did']
#self.deck.setText(self.mw.col.decks.name(did))
def onDelimiter(self):
str = getOnlyText(_("""\
By default, Anki will detect the character between fields, such as
a tab, comma, and so on. If Anki is detecting the character incorrectly,
you can enter it here. Use \\t to represent tab."""),
self, help="importing") or "\t"
str = str.replace("\\t", "\t")
str = str.encode("ascii")
self.hideMapping()
def updateDelim():
self.importer.delimiter = str
self.importer.updateDelimiter()
self.showMapping(hook=updateDelim)
self.updateDelimiterButtonText()
def updateDelimiterButtonText(self):
if not self.importer.needDelimiter:
return
if self.importer.delimiter:
d = self.importer.delimiter
else:
d = self.importer.dialect.delimiter
if d == "\t":
d = _("Tab")
elif d == ",":
d = _("Comma")
elif d == " ":
d = _("Space")
elif d == ";":
d = _("Semicolon")
elif d == ":":
d = _("Colon")
else:
            d = repr(d)
txt = _("Fields separated by: %s") % d
self.frm.autoDetect.setText(txt)
def accept(self):
self.importer.mapping = self.mapping
if not self.importer.mappingOk():
showWarning(
_("The first field of the note type must be mapped."))
return
self.importer.importMode = self.frm.importMode.currentIndex()
self.mw.pm.profile['importMode'] = self.importer.importMode
self.importer.allowHTML = self.frm.allowHTML.isChecked()
self.mw.pm.profile['allowHTML'] = self.importer.allowHTML
did = self.deck.selectedId()
if did != self.importer.model['did']:
self.importer.model['did'] = did
self.mw.col.models.save(self.importer.model)
self.mw.col.decks.select(did)
self.mw.progress.start(immediate=True)
self.mw.checkpoint(_("Import"))
try:
self.importer.run()
except UnicodeDecodeError:
showUnicodeWarning()
return
except Exception, e:
msg = _("Import failed.\n")
err = repr(str(e))
if "1-character string" in err:
msg += err
elif "invalidTempFolder" in err:
msg += self.mw.errorHandler.tempFolderMsg()
else:
msg += unicode(traceback.format_exc(), "ascii", "replace")
showText(msg)
return
finally:
self.mw.progress.finish()
txt = _("Importing complete.") + "\n"
if self.importer.log:
txt += "\n".join(self.importer.log)
self.close()
showText(txt)
self.mw.reset()
def setupMappingFrame(self):
# qt seems to have a bug with adding/removing from a grid, so we add
# to a separate object and add/remove that instead
self.frame = QFrame(self.frm.mappingArea)
self.frm.mappingArea.setWidget(self.frame)
self.mapbox = QVBoxLayout(self.frame)
self.mapbox.setContentsMargins(0,0,0,0)
self.mapwidget = None
def hideMapping(self):
self.frm.mappingGroup.hide()
def showMapping(self, keepMapping=False, hook=None):
if hook:
hook()
if not keepMapping:
self.mapping = self.importer.mapping
self.frm.mappingGroup.show()
assert self.importer.fields()
# set up the mapping grid
if self.mapwidget:
self.mapbox.removeWidget(self.mapwidget)
self.mapwidget.deleteLater()
self.mapwidget = QWidget()
self.mapbox.addWidget(self.mapwidget)
self.grid = QGridLayout(self.mapwidget)
self.mapwidget.setLayout(self.grid)
self.grid.setMargin(3)
self.grid.setSpacing(6)
fields = self.importer.fields()
for num in range(len(self.mapping)):
text = _("Field <b>%d</b> of file is:") % (num + 1)
self.grid.addWidget(QLabel(text), num, 0)
if self.mapping[num] == "_tags":
text = _("mapped to <b>Tags</b>")
elif self.mapping[num]:
text = _("mapped to <b>%s</b>") % self.mapping[num]
else:
text = _("<ignored>")
self.grid.addWidget(QLabel(text), num, 1)
button = QPushButton(_("Change"))
self.grid.addWidget(button, num, 2)
self.connect(button, SIGNAL("clicked()"),
lambda s=self,n=num: s.changeMappingNum(n))
def changeMappingNum(self, n):
f = ChangeMap(self.mw, self.importer.model, self.mapping[n]).getField()
try:
# make sure we don't have it twice
index = self.mapping.index(f)
self.mapping[index] = None
except ValueError:
pass
self.mapping[n] = f
if getattr(self.importer, "delimiter", False):
self.savedDelimiter = self.importer.delimiter
def updateDelim():
self.importer.delimiter = self.savedDelimiter
self.showMapping(hook=updateDelim, keepMapping=True)
else:
self.showMapping(keepMapping=True)
def reject(self):
self.modelChooser.cleanup()
remHook("currentModelChanged", self.modelChanged)
QDialog.reject(self)
def helpRequested(self):
openHelp("importing")
def showUnicodeWarning():
"""Shorthand to show a standard warning."""
showWarning(_(
"Selected file was not in UTF-8 format. Please see the "
"importing section of the manual."))
def onImport(mw):
filt = ";;".join([x[0] for x in importing.Importers])
file = getFile(mw, _("Import"), None, key="import",
filter=filt)
if not file:
return
file = unicode(file)
importFile(mw, file)
def importFile(mw, file):
importer = None
done = False
for i in importing.Importers:
if done:
break
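        # extract extensions ("apkg", "zip", ...) from filter strings
        # such as "Anki Deck Package (*.apkg *.zip)"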
for mext in re.findall("[( ]?\*\.(.+?)[) ]", i[0]):
if file.endswith("." + mext):
importer = i[1]
done = True
break
if not importer:
# if no matches, assume TSV
importer = importing.Importers[0][1]
importer = importer(mw.col, file)
# need to show import dialog?
if importer.needMapper:
# make sure we can load the file first
mw.progress.start(immediate=True)
try:
importer.open()
except UnicodeDecodeError:
showUnicodeWarning()
return
except Exception, e:
msg = repr(str(e))
if msg == "'unknownFormat'":
if file.endswith(".anki2"):
showWarning(_("""\
.anki2 files are not designed for importing. If you're trying to restore from a \
backup, please see the 'Backups' section of the user manual."""))
else:
showWarning(_("Unknown file format."))
else:
msg = _("Import failed. Debugging info:\n")
msg += unicode(traceback.format_exc(), "ascii", "replace")
showText(msg)
return
finally:
mw.progress.finish()
diag = ImportDialog(mw, importer)
else:
# if it's an apkg/zip, first test it's a valid file
if importer.__class__.__name__ == "AnkiPackageImporter":
try:
z = zipfile.ZipFile(importer.file)
z.getinfo("collection.anki2")
except:
showWarning(invalidZipMsg())
return
# we need to ask whether to import/replace
if not setupApkgImport(mw, importer):
return
mw.progress.start(immediate=True)
try:
importer.run()
except zipfile.BadZipfile:
showWarning(invalidZipMsg())
except Exception, e:
err = repr(str(e))
if "invalidFile" in err:
msg = _("""\
Invalid file. Please restore from backup.""")
showWarning(msg)
elif "invalidTempFolder" in err:
showWarning(mw.errorHandler.tempFolderMsg())
elif "readonly" in err:
showWarning(_("""\
Unable to import from a read-only file."""))
else:
msg = _("Import failed.\n")
msg += unicode(traceback.format_exc(), "ascii", "replace")
showText(msg)
else:
log = "\n".join(importer.log)
if "\n" not in log:
tooltip(log)
else:
showText(log)
finally:
mw.progress.finish()
mw.reset()
def invalidZipMsg():
return _("""\
This file does not appear to be a valid .apkg file. If you're getting this \
error from a file downloaded from AnkiWeb, chances are that your download \
failed. Please try again, and if the problem persists, please try again \
with a different browser.""")
def setupApkgImport(mw, importer):
base = os.path.basename(importer.file).lower()
full = (base == "collection.apkg") or re.match("backup-.*\\.apkg", base)
if not full:
# adding
return True
backup = re.match("backup-.*\\.apkg", base)
if not askUser(_("""\
This will delete your existing collection and replace it with the data in \
the file you're importing. Are you sure?"""), msgfunc=QMessageBox.warning):
return False
# schedule replacement; don't do it immediately as we may have been
# called as part of the startup routine
mw.progress.start(immediate=True)
mw.progress.timer(
100, lambda mw=mw, f=importer.file: replaceWithApkg(mw, f, backup), False)
def replaceWithApkg(mw, file, backup):
# unload collection, which will also trigger a backup
mw.unloadCollection()
# overwrite collection
z = zipfile.ZipFile(file)
try:
z.extract("collection.anki2", mw.pm.profileFolder())
except:
showWarning(_("The provided file is not a valid .apkg file."))
return
# because users don't have a backup of media, it's safer to import new
# data and rely on them running a media db check to get rid of any
# unwanted media. in the future we might also want to deduplicate this
# step
d = os.path.join(mw.pm.profileFolder(), "collection.media")
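    # the zip's "media" entry is a JSON map from numeric member names in
    # the archive to the real media filenames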
for c, file in json.loads(z.read("media")).items():
open(os.path.join(d, file), "wb").write(z.read(str(c)))
z.close()
# reload
mw.loadCollection()
if backup:
mw.col.modSchema(check=False)
mw.progress.finish()
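
def _import_flow_sketch(mw):
    # Illustrative sketch, not part of the original module. Shows the entry
    # points above; the path is hypothetical. importFile() picks an importer
    # by extension, then either shows ImportDialog (for mappable formats
    # like CSV/TSV) or runs the importer directly (e.g. .apkg), asking for
    # confirmation before a full collection replacement.
    importFile(mw, u"/tmp/shared-deck.apkg")
    # onImport(mw) does the same, but pops a file chooser first.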
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/importing.py
|
importing.py
|
from aqt.qt import *
import aqt
from aqt.utils import showInfo, openHelp, getOnlyText, shortcut, restoreGeom, saveGeom
from anki.hooks import addHook, remHook
class StudyDeck(QDialog):
def __init__(self, mw, names=None, accept=None, title=None,
help="studydeck", current=None, cancel=True,
parent=None, dyn=False, buttons=[], geomKey="default"):
QDialog.__init__(self, parent or mw)
self.mw = mw
self.form = aqt.forms.studydeck.Ui_Dialog()
self.form.setupUi(self)
self.form.filter.installEventFilter(self)
self.cancel = cancel
addHook('reset', self.onReset)
self.geomKey = "studyDeck-"+geomKey
restoreGeom(self, self.geomKey)
if not cancel:
self.form.buttonBox.removeButton(
self.form.buttonBox.button(QDialogButtonBox.Cancel))
if buttons:
for b in buttons:
self.form.buttonBox.addButton(b, QDialogButtonBox.ActionRole)
else:
b = QPushButton(_("Add"))
b.setShortcut(QKeySequence("Ctrl+N"))
b.setToolTip(shortcut(_("Add New Deck (Ctrl+N)")))
self.form.buttonBox.addButton(b, QDialogButtonBox.ActionRole)
b.connect(b, SIGNAL("clicked()"), self.onAddDeck)
if title:
self.setWindowTitle(title)
if not names:
names = sorted(self.mw.col.decks.allNames(dyn=dyn))
self.nameFunc = None
self.origNames = names
else:
self.nameFunc = names
self.origNames = names()
self.name = None
self.ok = self.form.buttonBox.addButton(
accept or _("Study"), QDialogButtonBox.AcceptRole)
self.setWindowModality(Qt.WindowModal)
self.connect(self.form.buttonBox,
SIGNAL("helpRequested()"),
lambda: openHelp(help))
self.connect(self.form.filter,
SIGNAL("textEdited(QString)"),
self.redraw)
self.connect(self.form.list,
SIGNAL("itemDoubleClicked(QListWidgetItem*)"),
self.accept)
self.show()
        # redraw after show so the centered scroll position is correct
self.redraw("", current)
self.exec_()
def eventFilter(self, obj, evt):
if evt.type() == QEvent.KeyPress:
if evt.key() == Qt.Key_Up:
c = self.form.list.count()
row = self.form.list.currentRow() - 1
if row < 0:
row = c - 1
self.form.list.setCurrentRow(row)
return True
elif evt.key() == Qt.Key_Down:
c = self.form.list.count()
row = self.form.list.currentRow() + 1
if row == c:
row = 0
self.form.list.setCurrentRow(row)
return True
return False
def redraw(self, filt, focus=None):
self.filt = filt
self.focus = focus
self.names = [n for n in self.origNames if self._matches(n, filt)]
l = self.form.list
l.clear()
l.addItems(self.names)
if focus in self.names:
idx = self.names.index(focus)
else:
idx = 0
l.setCurrentRow(idx)
l.scrollToItem(l.item(idx), QAbstractItemView.PositionAtCenter)
def _matches(self, name, filt):
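        # case-insensitive AND match: every space-separated word in the
        # filter must occur somewhere in the deck name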
name = name.lower()
filt = filt.lower()
if not filt:
return True
for word in filt.split(" "):
if word not in name:
return False
return True
def onReset(self):
# model updated?
if self.nameFunc:
self.origNames = self.nameFunc()
self.redraw(self.filt, self.focus)
def accept(self):
saveGeom(self, self.geomKey)
remHook('reset', self.onReset)
row = self.form.list.currentRow()
if row < 0:
showInfo(_("Please select something."))
return
self.name = self.names[self.form.list.currentRow()]
QDialog.accept(self)
def reject(self):
saveGeom(self, self.geomKey)
remHook('reset', self.onReset)
if not self.cancel:
return self.accept()
QDialog.reject(self)
def onAddDeck(self):
row = self.form.list.currentRow()
if row < 0:
default = self.form.filter.text()
else:
default = self.names[self.form.list.currentRow()]
n = getOnlyText(_("New deck name:"), default=default)
if n:
self.mw.col.decks.id(n)
self.name = n
# make sure we clean up reset hook when manually exiting
remHook('reset', self.onReset)
QDialog.accept(self)
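
def _studydeck_usage_sketch(mw):
    # Illustrative sketch, not part of the original module; assumes mw is
    # the running AnkiQt main window, and the title/accept texts are
    # hypothetical. The constructor blocks in exec_() until the dialog is
    # closed, after which .name holds the chosen deck name (or None).
    diag = StudyDeck(mw, title="Choose deck", accept="Choose")
    if diag.name:
        mw.col.decks.select(mw.col.decks.id(diag.name))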
|
AnkiServer
|
/AnkiServer-2.0.6.tar.gz/AnkiServer-2.0.6/anki-bundled/aqt/studydeck.py
|
studydeck.py
|