| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
<|file_name|>model_list_view.go<|end_file_name|><|fim▁begin|>/*
* Swaggy Jenkins
*
* Jenkins API clients generated from Swagger / Open API specification
*<|fim▁hole|> * Generated by: OpenAPI Generator (https://openapi-generator.tech)
*/
package openapi
// ListView struct for ListView
type ListView struct {
Class string `json:"_class,omitempty"`
Description string `json:"description,omitempty"`
Jobs []FreeStyleProject `json:"jobs,omitempty"`
Name string `json:"name,omitempty"`
Url string `json:"url,omitempty"`
}<|fim▁end|> | * API version: 1.1.2-pre.0
* Contact: [email protected] |
<|file_name|>qbittorrent_fr.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="fr">
<context>
<name>AboutDlg</name>
<message>
<location filename="../gui/about.ui" line="21"/>
<source>About qBittorrent</source>
<translation>À propos de qBittorrent</translation>
</message>
<message>
<location filename="../gui/about.ui" line="83"/>
<source>About</source>
<translation>À propos</translation>
</message>
<message>
<location filename="../gui/about.ui" line="128"/>
<source>Author</source>
<translation>Auteur</translation>
</message>
<message>
<location filename="../gui/about.ui" line="216"/>
<location filename="../gui/about.ui" line="293"/>
<source>Name:</source>
<translation>Nom :</translation>
</message>
<message>
<location filename="../gui/about.ui" line="240"/>
<location filename="../gui/about.ui" line="281"/>
<source>Country:</source>
<translation>Pays :</translation>
</message>
<message>
<location filename="../gui/about.ui" line="228"/>
<location filename="../gui/about.ui" line="312"/>
<source>E-mail:</source>
<translation>Courriel :</translation>
</message>
<message>
<location filename="../gui/about.ui" line="262"/>
<source>Greece</source>
<translation>Grèce</translation>
</message>
<message>
<location filename="../gui/about.ui" line="341"/>
<source>Current maintainer</source>
<translation>Mainteneur actuel</translation>
</message>
<message>
<location filename="../gui/about.ui" line="354"/>
<source>Original author</source>
<translation>Auteur original</translation>
</message>
<message>
<location filename="../gui/about.ui" line="412"/>
<source>Libraries</source>
<translation>Bibliothèques</translation>
</message>
<message>
<location filename="../gui/about.ui" line="424"/>
<source>This version of qBittorrent was built against the following libraries:</source>
<translation>Cette version de qBittorrent utilise les bibliothèques suivantes :</translation>
</message>
<message>
<location filename="../gui/about.ui" line="184"/>
<source>France</source>
<translation>France</translation>
</message>
<message>
<location filename="../gui/about.ui" line="382"/>
<source>Translation</source>
<translation>Traduction</translation>
</message>
<message>
<location filename="../gui/about.ui" line="399"/>
<source>License</source>
<translation>Licence</translation>
</message>
<message>
<location filename="../gui/about.ui" line="365"/>
<source>Thanks to</source>
<translation>Remerciements</translation>
</message>
</context>
<context>
<name>AddNewTorrentDialog</name>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="29"/>
<source>Save as</source>
<translation>Enregistrer sous</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="53"/>
<source>Browse...</source>
<translation>Parcourir...</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="62"/>
<source>Set as default save path</source>
<translation>Utiliser comme dossier de sauvegarde par défaut</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="72"/>
<source>Never show again</source>
<translation>Ne plus afficher</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="89"/>
<source>Torrent settings</source>
<translation>Paramètres du torrent</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="95"/>
<source>Start torrent</source>
<translation>Démarrer le torrent</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="107"/>
<source>Label:</source>
<translation>Catégorie :</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="126"/>
<source>Skip hash check</source>
<translation>Ne pas vérifier les données du torrent</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="136"/>
<source>Torrent Information</source>
<translation>Informations sur le torrent</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="144"/>
<source>Size:</source>
<translation>Taille :</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="158"/>
<source>Comment:</source>
<translation>Commentaire :</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="184"/>
<source>Date:</source>
<translation>Date :</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="198"/>
<source>Info Hash:</source>
<translation>Info hachage :</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="289"/>
<source>Normal</source>
<translation>Normale</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="294"/>
<source>High</source>
<translation>Haute</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="299"/>
<source>Maximum</source>
<translation>Maximale</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.ui" line="304"/>
<source>Do not download</source>
<translation>Ne pas télécharger</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="165"/>
<location filename="../gui/addnewtorrentdialog.cpp" line="636"/>
<source>I/O Error</source>
<translation>Erreur E/S</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="165"/>
<source>The torrent file does not exist.</source>
<translation>Le fichier torrent n'existe pas.</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="173"/>
<source>Invalid torrent</source>
<translation>Torrent invalide</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="173"/>
<source>Failed to load the torrent: %1</source>
<translation>Impossible de charger le torrent : %1</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="185"/>
<location filename="../gui/addnewtorrentdialog.cpp" line="213"/>
<source>Already in download list</source>
<translation>Déjà présent dans la liste des téléchargements</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="333"/>
<source>Free disk space: %1</source>
<translation>Espace disque libre : %1</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="662"/>
<source>Not Available</source>
<comment>This comment is unavailable</comment>
<translation>Non disponible</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="663"/>
<source>Not Available</source>
<comment>This date is unavailable</comment>
<translation>Non disponible</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="671"/>
<source>Not available</source>
<translation>Non disponible</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="202"/>
<source>Invalid magnet link</source>
<translation>Lien magnet invalide</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="185"/>
<source>Torrent is already in download list. Trackers were merged.</source>
<translation>Le torrent est déjà dans la liste des téléchargements. Les trackers ont été fusionnés.</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="188"/>
<location filename="../gui/addnewtorrentdialog.cpp" line="216"/>
<source>Cannot add torrent</source>
<translation>Impossible d'ajouter le torrent</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="188"/>
<source>Cannot add this torrent. Perhaps it is already in adding state.</source>
<translation>Impossible d'ajouter ce torrent. Peut-être est-il déjà en cours d'ajout.</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="202"/>
<source>This magnet link was not recognized</source>
<translation>Ce lien magnet n'a pas été reconnu</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="213"/>
<source>Magnet link is already in download list. Trackers were merged.</source>
<translation>Le lien magnet est déjà dans la liste des téléchargements. Les trackers ont été fusionnés.</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="216"/>
<source>Cannot add this torrent. Perhaps it is already in adding.</source>
<translation>Impossible d'ajouter ce torrent. Peut-être est-il déjà en cours d'ajout.</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="225"/>
<source>Magnet link</source>
<translation>Lien magnet</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="232"/>
<source>Retrieving metadata...</source>
<translation>Récupération des métadonnées…</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="331"/>
<source>Not Available</source>
<comment>This size is unavailable.</comment>
<translation>Non disponible</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="362"/>
<location filename="../gui/addnewtorrentdialog.cpp" line="370"/>
<location filename="../gui/addnewtorrentdialog.cpp" line="372"/>
<source>Choose save path</source>
<translation>Choisir un répertoire de destination</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="421"/>
<source>Rename the file</source>
<translation>Renommer le fichier</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="422"/>
<source>New name:</source>
<translation>Nouveau nom :</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="426"/>
<location filename="../gui/addnewtorrentdialog.cpp" line="451"/>
<source>The file could not be renamed</source>
<translation>Le fichier n'a pas pu être renommé</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="427"/>
<source>This file name contains forbidden characters, please choose a different one.</source>
<translation>Ce nom de fichier contient des caractères interdits, veuillez en choisir un autre.</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="452"/>
<location filename="../gui/addnewtorrentdialog.cpp" line="485"/>
<source>This name is already in use in this folder. Please use a different name.</source>
<translation>Ce nom de fichier est déjà utilisé dans ce dossier. Veuillez utiliser un autre nom.</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="484"/>
<source>The folder could not be renamed</source>
<translation>Le dossier n'a pas pu être renommé</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="541"/>
<source>Rename...</source>
<translation>Renommer…</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="545"/>
<source>Priority</source>
<translation>Priorité</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="637"/>
<source>Invalid metadata</source>
<translation>Métadonnées invalides</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="644"/>
<source>Parsing metadata...</source>
<translation>Analyse syntaxique des métadonnées...</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="648"/>
<source>Metadata retrieval complete</source>
<translation>Récupération des métadonnées terminée</translation>
</message>
<message>
<location filename="../gui/addnewtorrentdialog.cpp" line="708"/>
<source>Download Error</source>
<translation>Erreur de téléchargement</translation>
</message>
</context>
<context>
<name>AdvancedSettings</name>
<message>
<location filename="../gui/advancedsettings.h" line="219"/>
<source>Disk write cache size</source>
<translation>Taille du cache disque</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="201"/>
<source> MiB</source>
<translation> Mio</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="239"/>
<source>Outgoing ports (Min) [0: Disabled]</source>
<translation>Ports sortants (min) [0: désactivé]</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="244"/>
<source>Outgoing ports (Max) [0: Disabled]</source>
<translation>Ports sortants (max) [0: désactivé]</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="247"/>
<source>Recheck torrents on completion</source>
<translation>Revérifier les torrents lorsqu'ils sont terminés</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="253"/>
<source>Transfer list refresh interval</source>
<translation>Intervalle d'actualisation de la liste de transfert</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="252"/>
<source> ms</source>
<comment> milliseconds</comment>
<translation> ms</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="80"/>
<source>Setting</source>
<translation>Paramètre</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="80"/>
<source>Value</source>
<comment>Value set for this setting</comment>
<translation>Valeur</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="199"/>
<source> (auto)</source>
<translation> (automatique)</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="224"/>
<source> s</source>
<comment> seconds</comment>
<translation> s</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="225"/>
<source>Disk cache expiry interval</source>
<translation>Intervalle de l'expiration du cache disque</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="228"/>
<source>Enable OS cache</source>
<translation>Activer le cache du système d’exploitation</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="233"/>
<source> m</source>
<comment> minutes</comment>
<translation> m</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="256"/>
<source>Resolve peer countries (GeoIP)</source>
<translation>Afficher le pays des pairs (GeoIP)</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="259"/>
<source>Resolve peer host names</source>
<translation>Afficher le nom d'hôte des pairs</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="264"/>
<source>Maximum number of half-open connections [0: Disabled]</source>
<translation>Nombre maximum de connexions à moitié ouvertes [0: désactivé]</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="267"/>
<source>Strict super seeding</source>
<translation>Super-partage strict</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="287"/>
<source>Network Interface (requires restart)</source>
<translation>Interface réseau (redémarrage requis)</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="290"/>
<source>Listen on IPv6 address (requires restart)</source>
<translation>Écouter sur l’adresse IPv6 (redémarrage requis)</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="315"/>
<source>Confirm torrent recheck</source>
<translation>Confirmer la revérification du torrent</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="318"/>
<source>Exchange trackers with other peers</source>
<translation>Échanger les trackers avec d'autres pairs</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="321"/>
<source>Always announce to all trackers</source>
<translation>Toujours contacter tous les trackers</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="269"/>
<source>Any interface</source>
<comment>i.e. Any network interface</comment>
<translation>N'importe quelle interface</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="234"/>
<source>Save resume data interval</source>
<comment>How often the fastresume file is saved.</comment>
<translation>Intervalle de sauvegarde des données de reprise</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="293"/>
<source>IP Address to report to trackers (requires restart)</source>
<translation>Adresse IP annoncée aux trackers (redémarrage requis)</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="296"/>
<source>Display program on-screen notifications</source>
<translation>Afficher les messages de notification à l'écran</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="299"/>
<source>Enable embedded tracker</source>
<translation>Activer le tracker intégré</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="304"/>
<source>Embedded tracker port</source>
<translation>Port du tracker intégré</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="307"/>
<source>Check for software updates</source>
<translation>Vérifier les mises à jour du logiciel</translation>
</message>
<message>
<location filename="../gui/advancedsettings.h" line="311"/>
<source>Use system icon theme</source>
<translation>Utiliser le thème d'icônes du système</translation>
</message>
</context>
<context>
<name>Application</name>
<message>
<location filename="../app/application.cpp" line="105"/>
<source>qBittorrent %1 started</source>
<comment>qBittorrent v3.2.0alpha started</comment>
<translation>qBittorrent %1 démarré.</translation>
</message>
<message>
<location filename="../app/application.cpp" line="262"/>
<source>Information</source>
<translation>Information</translation>
</message>
<message>
<location filename="../app/application.cpp" line="263"/>
<source>To control qBittorrent, access the Web UI at http://localhost:%1</source>
<translation>Pour contrôler qBittorrent, accédez à l'interface web via http://localhost:%1</translation>
</message>
<message>
<location filename="../app/application.cpp" line="264"/>
<source>The Web UI administrator user name is: %1</source>
<translation>Le nom d'utilisateur de l'administrateur de l'interface web est : %1</translation>
</message>
<message>
<location filename="../app/application.cpp" line="267"/>
<source>The Web UI administrator password is still the default one: %1</source>
<translation>Le mot de passe de l'administrateur de l'interface web est toujours celui par défaut : %1</translation>
</message>
<message>
<location filename="../app/application.cpp" line="268"/>
<source>This is a security risk, please consider changing your password from program preferences.</source>
<translation>Ceci peut être dangereux, veuillez penser à changer votre mot de passe dans les options.</translation>
</message>
<message>
<location filename="../app/application.cpp" line="442"/>
<source>Saving torrent progress...</source>
<translation>Sauvegarde de l'avancement des torrents...</translation>
</message>
</context>
<context>
<name>AutomatedRssDownloader</name>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="208"/>
<source>Save to:</source>
<translation>Sauvegarder sous :</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="14"/>
<source>RSS Downloader</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="26"/>
<source>Enable Automated RSS Downloader</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="48"/>
<source>Download Rules</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="88"/>
<source>Rule Definition</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="94"/>
<source>Use Regular Expressions</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="103"/>
<source>Must Contain:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="110"/>
<source>Must Not Contain:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="117"/>
<source>Episode Filter:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="180"/>
<source>Assign Label:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="196"/>
<source>Save to a Different Directory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="236"/>
<source>Ignore Subsequent Matches for (0 to Disable)</source>
<comment>... X days</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="246"/>
<source> days</source>
<translation> jours</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="276"/>
<source>Add Paused:</source>
<translation>Ajouter en pause :</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="284"/>
<source>Use global settings</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="289"/>
<source>Always</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="294"/>
<source>Never</source>
<translation type="unfinished">Jamais</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="315"/>
<source>Apply Rule to Feeds:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="337"/>
<source>Matching RSS Articles</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="362"/>
<source>&Import...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.ui" line="369"/>
<source>&Export...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="77"/>
<source>Matches articles based on episode filter.</source>
<translation>Correspond aux articles selon le filtre d'épisodes.</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="77"/>
<source>Example: </source>
<translation>Exemple :</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="78"/>
<source> will match 2, 5, 8 through 15, 30 and onward episodes of season one</source>
<comment>example X will match</comment>
<translation>correspondra aux épisodes 2, 5, 8 à 15, 30 et suivants de la saison 1</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="79"/>
<source>Episode filter rules: </source>
<translation>Règles de filtrage d'épisodes :</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="79"/>
<source>Season number is a mandatory non-zero value</source>
<translation>Le numéro de saison est une valeur obligatoire différente de zéro</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="80"/>
<source>Episode number is a mandatory non-zero value</source>
<translation>Le numéro d'épisode est une valeur obligatoire différente de zéro</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="81"/>
<source>Filter must end with semicolon</source>
<translation>Le filtre doit se terminer avec un point-virgule</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="82"/>
<source>Three range types for episodes are supported: </source>
<translation>Trois types d'intervalles d'épisodes sont pris en charge :</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="83"/>
<source>Single number: <b>1x25;</b> matches episode 25 of season one</source>
<translation>Nombre simple : <b>1x25;</b> correspond à l'épisode 25 de la saison 1</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="84"/>
<source>Normal range: <b>1x25-40;</b> matches episodes 25 through 40 of season one</source>
<translation>Intervalle standard : <b>1x25-40;</b> correspond aux épisodes 25 à 40 de la saison 1</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="85"/>
<source>Infinite range: <b>1x25-;</b> matches episodes 25 and upward of season one</source>
<translation>Intervalle infini : <b>1x25-;</b> correspond aux épisodes 25 et suivants de la saison 1</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="266"/>
<source>Last Match: %1 days ago</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="268"/>
<source>Last Match: Unknown</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="361"/>
<source>New rule name</source>
<translation>Nouveau nom pour la règle</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="361"/>
<source>Please type the name of the new download rule.</source>
<translation>Veuillez entrer le nom de la nouvelle règle de téléchargement.</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="365"/>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="483"/>
<source>Rule name conflict</source>
<translation>Conflit dans les noms de règle</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="365"/>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="483"/>
<source>A rule with this name already exists, please choose another name.</source>
<translation>Une règle avec ce nom existe déjà, veuillez en choisir un autre.</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="383"/>
<source>Are you sure you want to remove the download rule named '%1'?</source>
<translation>Êtes-vous certain de vouloir supprimer la règle de téléchargement '%1' ?</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="385"/>
<source>Are you sure you want to remove the selected download rules?</source>
<translation>Voulez-vous vraiment supprimer les règles sélectionnées ?</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="386"/>
<source>Rule deletion confirmation</source>
<translation>Confirmation de la suppression</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="402"/>
<source>Destination directory</source>
<translation>Répertoire de destination</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="410"/>
<source>Invalid action</source>
<translation>Action invalide</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="410"/>
<source>The list is empty, there is nothing to export.</source>
<translation>La liste est vide, il n'y a rien à exporter.</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="414"/>
<source>Where would you like to save the list?</source>
<translation>Où désirez-vous sauvegarder cette liste ?</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="414"/>
<source>Rules list (*.rssrules)</source>
<translation>Liste de règles (*.rssrules)</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="419"/>
<source>I/O Error</source>
<translation>Erreur E/S</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="419"/>
<source>Failed to create the destination file</source>
<translation>Impossible de créer le fichier de destination</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="427"/>
<source>Please point to the RSS download rules file</source>
<translation>Veuillez indiquer le fichier contenant les règles de téléchargement RSS</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="427"/>
<source>Rules list</source>
<translation>Liste des règles</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="431"/>
<source>Import Error</source>
<translation>Erreur lors de l'importation</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="431"/>
<source>Failed to import the selected rules file</source>
<translation>Impossible d'importer le fichier de règles sélectionné</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="442"/>
<source>Add new rule...</source>
<translation>Ajouter une nouvelle règle…</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="448"/>
<source>Delete rule</source>
<translation>Supprimer la règle</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="450"/>
<source>Rename rule...</source>
<translation>Renommer la règle…</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="452"/>
<source>Delete selected rules</source>
<translation>Supprimer les règles sélectionnées</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="479"/>
<source>Rule renaming</source>
<translation>Renommage de la règle</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="479"/>
<source>Please type the new rule name</source>
<translation>Veuillez entrer le nouveau nom pour la règle</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="581"/>
<source>Regex mode: use Perl-like regular expressions</source>
<translation>Mode regex : utiliser des expressions régulières similaires à celles de Perl</translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="585"/>
<source>Wildcard mode: you can use<ul><li>? to match any single character</li><li>* to match zero or more of any characters</li><li>Whitespaces count as AND operators</li></ul></source>
<translation>Mode simplifié : vous pouvez utiliser<ul><li>? pour remplacer n'importe quel caractère</li><li>* pour remplacer zéro ou plusieurs caractères</li><li>Les espaces sont considérés équivalent à des opérateurs ET</li></ul></translation>
</message>
<message>
<location filename="../gui/rss/automatedrssdownloader.cpp" line="587"/>
<source>Wildcard mode: you can use<ul><li>? to match any single character</li><li>* to match zero or more of any characters</li><li>| is used as OR operator</li></ul></source>
<translation>Mode simplifié : vous pouvez utiliser<ul><li>? pour remplacer n'importe quel caractère</li><li>* pour remplacer zéro ou plusieurs caractères</li><li>| est utilisé comme opérateur OU</li></ul></translation>
</message>
</context>
<context>
<name>BitTorrent::Session</name>
<message>
<location filename="../core/bittorrent/session.cpp" line="173"/>
<source>Peer ID: </source>
<translation>ID du pair :</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="316"/>
<source>HTTP User-Agent is '%1'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="343"/>
<source>Anonymous mode [ON]</source>
<translation>Mode anonyme [ACTIVÉ]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="345"/>
<source>Anonymous mode [OFF]</source>
<translation>Mode anonyme [DÉSACTIVÉ]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="535"/>
<source>PeX support [ON]</source>
<translation>Prise en charge de PeX [ACTIVÉE]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="537"/>
<source>PeX support [OFF]</source>
<translation>Prise en charge de PeX [DÉSACTIVÉE]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="539"/>
<source>Restart is required to toggle PeX support</source>
<translation>Un redémarrage est nécessaire pour changer le support PeX</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="544"/>
<source>Local Peer Discovery support [ON]</source>
<translation>Découverte de pairs sur le réseau local [ACTIVÉE]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="548"/>
<source>Local Peer Discovery support [OFF]</source>
<translation>Découverte de pairs sur le réseau local [DÉSACTIVÉE]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="561"/>
<source>Encryption support [ON]</source>
<translation>Prise en charge du chiffrement [ACTIVÉE]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="566"/>
<source>Encryption support [FORCED]</source>
<translation>Prise en charge du chiffrement [FORCÉE]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="571"/>
<source>Encryption support [OFF]</source>
<translation>Prise en charge du chiffrement [DÉSACTIVÉE]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="649"/>
<source>Embedded Tracker [ON]</source>
<translation>Tracker intégré [ACTIVÉ]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="651"/>
<source>Failed to start the embedded tracker!</source>
<translation>Impossible de démarrer le tracker intégré !</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="654"/>
<source>Embedded Tracker [OFF]</source>
<translation>Tracker intégré [DÉSACTIVÉ]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="692"/>
<source>'%1' reached the maximum ratio you set. Removing...</source>
<translation>'%1' a atteint le ratio maximum que vous avez défini. Suppression...</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="698"/>
<source>'%1' reached the maximum ratio you set. Pausing...</source>
<translation>'%1' a atteint le ratio maximum que vous avez défini. Mise en pause...</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1175"/>
<source>Error: Could not create torrent export directory: '%1'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1203"/>
<source>Error: could not export torrent '%1', maybe it has not metadata yet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1430"/>
<source>System network status changed to %1</source>
<comment>e.g: System network status changed to ONLINE</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1430"/>
<source>ONLINE</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1430"/>
<source>OFFLINE</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1438"/>
<source>Network configuration of %1 has changed, refreshing session binding</source>
<comment>e.g: Network configuration of tun0 has changed, refreshing session binding</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1721"/>
<source>Unable to decode '%1' torrent file.</source>
<translation>Impossible de décoder le fichier torrent '%1'.</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1827"/>
<source>Recursive download of file '%1' embedded in torrent '%2'</source>
<comment>Recursive download of 'test.torrent' embedded in torrent 'test2'</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2138"/>
<source>Couldn't save '%1.torrent'</source>
<translation>Impossible de sauvegarder '%1.torrent'</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2240"/>
<source>because %1 is disabled.</source>
<comment>this peer was blocked because uTP is disabled.</comment>
<translation>parce que %1 est désactivé.</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2243"/>
<source>because %1 is disabled.</source>
<comment>this peer was blocked because TCP is disabled.</comment>
<translation>parce que %1 est désactivé.</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2261"/>
<source>URL seed lookup failed for URL: '%1', message: %2</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="811"/>
<source>'%1' was removed from transfer list and hard disk.</source>
<comment>'xxx.avi' was removed...</comment>
<translation>'%1' a été supprimé de la liste de transferts et du disque.</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="813"/>
<source>'%1' was removed from transfer list.</source>
<comment>'xxx.avi' was removed...</comment>
<translation>'%1' a été supprimé de la liste de transferts.</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="972"/>
<source>Downloading '%1', please wait...</source>
<comment>e.g: Downloading 'xxx.torrent', please wait...</comment>
<translation>Téléchargement de '%1', veuillez patienter...</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1183"/>
<source>Torrent Export: torrent is invalid, skipping...</source>
<translation>Exportation de torrents : le torrent est invalide et a été ignoré...</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1276"/>
<source>DHT support [ON]</source>
<translation>Prise en charge de DHT [ACTIVÉE]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1281"/>
<source>DHT support [OFF]. Reason: %1</source>
<translation>Prise en charge de DHT [DÉSACTIVÉE]. Motif : %1</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1289"/>
<source>DHT support [OFF]</source>
<translation>Prise en charge de DHT [DÉSACTIVÉE]</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="165"/>
<location filename="../core/bittorrent/session.cpp" line="1508"/>
<source>qBittorrent is trying to listen on any interface port: %1</source>
<comment>e.g: qBittorrent is trying to listen on any interface port: TCP/6881</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1512"/>
<source>qBittorrent failed to listen on any interface port: %1. Reason: %2</source>
<comment>e.g: qBittorrent failed to listen on any interface port: TCP/6881. Reason: no such interface</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1461"/>
<source>The network interface defined is invalid: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="169"/>
<location filename="../core/bittorrent/session.cpp" line="1519"/>
<source>qBittorrent is trying to listen on interface %1 port: %2</source>
<comment>e.g: qBittorrent is trying to listen on interface 192.168.0.1 port: TCP/6881</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1485"/>
<source>qBittorrent didn't find an %1 local address to listen on</source>
<comment>qBittorrent didn't find an IPv4 local address to listen on</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1633"/>
<source>Tracker '%1' was added to torrent '%2'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1643"/>
<source>Tracker '%1' was deleted from torrent '%2'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1658"/>
<source>URL seed '%1' was added to torrent '%2'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1664"/>
<source>URL seed '%1' was removed from torrent '%2'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1943"/>
<source>Unable to resume torrent '%1'.</source>
<comment>e.g: Unable to resume torrent 'hash'.</comment>
<translation>Impossible de reprendre le torrent '%1'.</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1966"/>
<source>Successfully parsed the provided IP filter: %1 rules were applied.</source>
<comment>%1 is a number</comment>
<translation type="unfinished">Le filtre IP a été correctement chargé : %1 règles ont été appliquées.</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="1972"/>
<source>Error: Failed to parse the provided IP filter.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2100"/>
<source>Couldn't add torrent. Reason: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2121"/>
<source>'%1' resumed. (fast resume)</source>
<comment>'torrent name' was resumed. (fast resume)</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2152"/>
<source>'%1' added to download list.</source>
<comment>'torrent name' was added to download list.</comment>
<translation>'%1' ajouté à la liste de téléchargement.</translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2204"/>
<source>An I/O error occurred, '%1' paused. %2</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2212"/>
<source>UPnP/NAT-PMP: Port mapping failure, message: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2218"/>
<source>UPnP/NAT-PMP: Port mapping successful, message: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2228"/>
<source>due to IP filter.</source>
<comment>this peer was blocked due to ip filter.</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2231"/>
<source>due to port filter.</source>
<comment>this peer was blocked due to port filter.</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2234"/>
<source>due to i2p mixed mode restrictions.</source>
<comment>this peer was blocked due to i2p mixed mode restrictions.</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2237"/>
<source>because it has a low port.</source>
<comment>this peer was blocked because it has a low port.</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2275"/>
<source>qBittorrent is successfully listening on interface %1 port: %2/%3</source>
<comment>e.g: qBittorrent is successfully listening on interface 192.168.0.1 port: TCP/6881</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2301"/>
<source>qBittorrent failed listening on interface %1 port: %2/%3. Reason: %4</source>
<comment>e.g: qBittorrent failed listening on interface 192.168.0.1 port: TCP/6881. Reason: already in use</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/session.cpp" line="2310"/>
<source>External IP: %1</source>
<comment>e.g. External IP: 192.168.0.1</comment>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>BitTorrent::TorrentHandle</name>
<message>
<location filename="../core/bittorrent/torrenthandle.cpp" line="1315"/>
<source>Could not move torrent: '%1'. Reason: %2</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/torrenthandle.cpp" line="1456"/>
<source>File sizes mismatch for torrent '%1', pausing it.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/torrenthandle.cpp" line="1462"/>
<source>Fast resume data was rejected for torrent '%1'. Reason: %2. Checking again...</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>CookiesDlg</name>
<message>
<location filename="../gui/rss/cookiesdlg.ui" line="14"/>
<source>Cookies management</source>
<translation>Gestion des cookies</translation>
</message>
<message>
<location filename="../gui/rss/cookiesdlg.ui" line="36"/>
<source>Key</source>
<extracomment>As in Key/Value pair</extracomment>
<translation>Clé</translation>
</message>
<message>
<location filename="../gui/rss/cookiesdlg.ui" line="41"/>
<source>Value</source>
<extracomment>As in Key/Value pair</extracomment>
<translation>Valeur</translation>
</message>
<message>
<location filename="../gui/rss/cookiesdlg.cpp" line="48"/>
<source>Common keys for cookies are: '%1', '%2'.
You should get this information from your Web browser preferences.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>DeletionConfirmationDlg</name>
<message>
<location filename="../gui/deletionconfirmationdlg.h" line="48"/>
<source>Are you sure you want to delete '%1' from the transfer list?</source>
<comment>Are you sure you want to delete 'ubuntu-linux-iso' from the transfer list?</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/deletionconfirmationdlg.h" line="50"/>
<source>Are you sure you want to delete these %1 torrents from the transfer list?</source>
<comment>Are you sure you want to delete these 5 torrents from the transfer list?</comment>
<translation>Voulez-vous vraiment supprimer ces %1 torrents de la liste des transferts ?</translation>
</message>
</context>
<context>
<name>DownloadedPiecesBar</name>
<message>
<location filename="../gui/properties/downloadedpiecesbar.cpp" line="37"/>
<source>White: Missing pieces</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/downloadedpiecesbar.cpp" line="37"/>
<source>Green: Partial pieces</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/downloadedpiecesbar.cpp" line="37"/>
<source>Blue: Completed pieces</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>ExecutionLog</name>
<message>
<location filename="../gui/executionlog.ui" line="27"/>
<source>General</source>
<translation>Général</translation>
</message>
<message>
<location filename="../gui/executionlog.ui" line="33"/>
<source>Blocked IPs</source>
<translation>Adresses IP bloquées</translation>
</message>
<message>
<location filename="../gui/executionlog.cpp" line="101"/>
<source><font color='red'>%1</font> was blocked %2</source>
<comment>x.y.z.w was blocked</comment>
<translation><font color='red'>%1</font> a été bloqué %2</translation>
</message>
<message>
<location filename="../gui/executionlog.cpp" line="103"/>
<source><font color='red'>%1</font> was banned</source>
<comment>x.y.z.w was banned</comment>
<translation><font color='red'>%1</font> a été banni</translation>
</message>
</context>
<context>
<name>FeedListWidget</name>
<message>
<location filename="../gui/rss/feedlistwidget.cpp" line="41"/>
<source>RSS feeds</source>
<translation>Flux RSS</translation>
</message>
<message>
<location filename="../gui/rss/feedlistwidget.cpp" line="43"/>
<source>Unread</source>
<translation>Non lu</translation>
</message>
</context>
<context>
<name>FilterParserThread</name>
<message>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="65"/>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="159"/>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="267"/>
<source>I/O Error: Could not open ip filter file in read mode.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="278"/>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="290"/>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="311"/>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="320"/>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="330"/>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="340"/>
<location filename="../core/bittorrent/private/filterparserthread.cpp" line="360"/>
<source>Parsing Error: The filter file is not a valid PeerGuardian P2B file.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>GeoIPDatabase</name>
<message>
<location filename="../core/net/private/geoipdatabase.cpp" line="101"/>
<location filename="../core/net/private/geoipdatabase.cpp" line="131"/>
<source>Unsupported database file size.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/private/geoipdatabase.cpp" line="236"/>
<source>Metadata error: '%1' entry not found.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/private/geoipdatabase.cpp" line="237"/>
<source>Metadata error: '%1' entry has invalid type.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/private/geoipdatabase.cpp" line="246"/>
<source>Unsupported database version: %1.%2</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/private/geoipdatabase.cpp" line="253"/>
<source>Unsupported IP version: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/private/geoipdatabase.cpp" line="260"/>
<source>Unsupported record size: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/private/geoipdatabase.cpp" line="273"/>
<source>Invalid database type: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/private/geoipdatabase.cpp" line="294"/>
<source>Database corrupted: no data section found.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>HttpServer</name>
<message>
<location filename="../webui/extra_translations.h" line="36"/>
<source>File</source>
<translation>Fichier</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="37"/>
<source>Edit</source>
<translation>Édition</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="38"/>
<source>Help</source>
<translation>Aide</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="40"/>
<source>Download Torrents from their URL or Magnet link</source>
<translation>Téléchargement de torrents depuis leur URL ou lien magnet</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="41"/>
<source>Only one link per line</source>
<translation>Un seul lien par ligne</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="42"/>
<source>Download local torrent</source>
<translation>Téléchargement d'un torrent local</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="43"/>
<source>Download</source>
<translation>Télécharger</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="45"/>
<source>Global upload rate limit must be greater than 0 or disabled.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="46"/>
<source>Global download rate limit must be greater than 0 or disabled.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="47"/>
<source>Alternative upload rate limit must be greater than 0 or disabled.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="48"/>
<source>Alternative download rate limit must be greater than 0 or disabled.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="49"/>
<source>Maximum active downloads must be greater than -1.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="50"/>
<source>Maximum active uploads must be greater than -1.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="51"/>
<source>Maximum active torrents must be greater than -1.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="52"/>
<source>Maximum number of connections limit must be greater than 0 or disabled.</source>
<translation>Le nombre maximum de connexions doit être supérieur à 0 ou désactivé.</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="53"/>
<source>Maximum number of connections per torrent limit must be greater than 0 or disabled.</source>
<translation>Le nombre maximum de connexions par torrent doit être supérieur à 0 ou désactivé.</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="54"/>
<source>Maximum number of upload slots per torrent limit must be greater than 0 or disabled.</source>
<translation>Le nombre maximum de slots d'envoi par torrent doit être supérieur à 0 ou désactivé.</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="55"/>
<source>Unable to save program preferences, qBittorrent is probably unreachable.</source>
<translation>Impossible de sauvegarder les préférences, qBittorrent est probablement injoignable.</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="56"/>
<source>Language</source>
<translation>Langue</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="57"/>
<source>The port used for incoming connections must be between 1 and 65535.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="58"/>
<source>The port used for the Web UI must be between 1 and 65535.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="68"/>
<source>Unable to log in, qBittorrent is probably unreachable.</source>
<translation>Impossible de se connecter, qBittorrent est probablement inaccessible.</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="69"/>
<source>Invalid Username or Password.</source>
<translation>Nom d'utilisateur ou mot de passe invalide.</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="70"/>
<source>Password</source>
<translation>Mot de passe</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="71"/>
<source>Login</source>
        <translation>Connexion</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="72"/>
<source>Upload Failed!</source>
<translation>Le transfert a échoué !</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="73"/>
<source>Original authors</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="74"/>
<source>Upload limit:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="75"/>
<source>Download limit:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="76"/>
<source>Apply</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="77"/>
<source>Add</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="78"/>
<source>Upload Torrents</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="79"/>
<source>All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="80"/>
<source>Downloading</source>
<translation type="unfinished">En téléchargement</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="81"/>
<source>Seeding</source>
<translation type="unfinished">En partage</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="82"/>
<source>Completed</source>
<translation type="unfinished">Terminé</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="83"/>
<source>Resumed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="84"/>
<source>Paused</source>
<translation type="unfinished">En pause</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="85"/>
<source>Active</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="86"/>
<source>Inactive</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="90"/>
<source>Downloaded</source>
<comment>Is the file downloaded or not?</comment>
<translation>Téléchargé</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="39"/>
<source>Logout</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="44"/>
<source>Are you sure you want to delete the selected torrents from the transfer list?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="59"/>
<source>The Web UI username must be at least 3 characters long.</source>
        <translation>Le nom d'utilisateur pour l'interface web doit contenir au moins trois caractères.</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="60"/>
        <source>The Web UI password must be at least 3 characters long.</source>
        <translation>Le mot de passe pour l'interface web doit contenir au moins trois caractères.</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="61"/>
<source>Save</source>
<translation>Sauvegarder</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="62"/>
<source>qBittorrent client is not reachable</source>
<translation>Le logiciel qBittorrent n'est pas accessible</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="63"/>
<source>HTTP Server</source>
<translation>Serveur HTTP</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="64"/>
<source>The following parameters are supported:</source>
<translation>Les paramètres suivants sont pris en charge :</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="65"/>
<source>Torrent path</source>
<translation>Chemin du torrent</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="66"/>
<source>Torrent name</source>
<translation>Nom du torrent</translation>
</message>
<message>
<location filename="../webui/extra_translations.h" line="67"/>
<source>qBittorrent has been shutdown.</source>
<translation>qBittorrent a été arrêté.</translation>
</message>
</context>
<context>
<name>LabelFiltersList</name>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="189"/>
<source>All (0)</source>
<comment>this is for the label filter</comment>
<translation>Toutes Catégories (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="192"/>
<source>Unlabeled (0)</source>
        <translation>Sans Catégorie (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="214"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="260"/>
<source>All (%1)</source>
<comment>this is for the label filter</comment>
<translation>Toutes Catégories (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="217"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="235"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="263"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="268"/>
<source>Unlabeled (%1)</source>
        <translation>Sans Catégorie (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="239"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="276"/>
<source>%1 (%2)</source>
<comment>label_name (10)</comment>
<translation>%1 (%2)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="330"/>
<source>Add label...</source>
<translation>Ajouter catégorie...</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="334"/>
<source>Remove label</source>
<translation>Supprimer catégorie</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="335"/>
<source>Remove unused labels</source>
<translation>Supprimer les catégories inutilisées</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="337"/>
<source>Resume torrents</source>
<translation>Démarrer les torrents</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="338"/>
<source>Pause torrents</source>
<translation>Mettre en pause les torrents</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="339"/>
<source>Delete torrents</source>
<translation>Supprimer les torrents</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="366"/>
<source>New Label</source>
<translation>Nouvelle catégorie</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="366"/>
<source>Label:</source>
<translation>Catégorie :</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="372"/>
<source>Invalid label name</source>
<translation>Nom de catégorie invalide</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="372"/>
<source>Please don't use any special characters in the label name.</source>
        <translation>N'utilisez pas de caractères spéciaux dans le nom de catégorie.</translation>
</message>
</context>
<context>
<name>LineEdit</name>
<message>
<location filename="../gui/lineedit/src/lineedit.cpp" line="30"/>
<source>Clear the text</source>
<translation>Effacer le texte</translation>
</message>
</context>
<context>
<name>LogListWidget</name>
<message>
<location filename="../gui/loglistwidget.cpp" line="47"/>
<source>Copy</source>
<translation>Copier</translation>
</message>
<message>
<location filename="../gui/loglistwidget.cpp" line="48"/>
<source>Clear</source>
<translation>Effacer</translation>
</message>
</context>
<context>
<name>MainWindow</name>
<message>
<location filename="../gui/mainwindow.ui" line="37"/>
<source>&Edit</source>
<translation>&Édition</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="60"/>
<source>&Tools</source>
<translation>Ou&tils</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="80"/>
<source>&File</source>
<translation>&Fichier</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="50"/>
<source>&Help</source>
<translation>&Aide</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="64"/>
<source>On Downloads &Done</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="90"/>
<source>&View</source>
<translation>A&ffichage</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="161"/>
<source>&Options...</source>
<translation>&Options...</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="174"/>
<source>&Resume</source>
<translation>&Démarrer</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="206"/>
<source>Torrent &Creator</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="211"/>
<source>Set Upload Limit...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="216"/>
<source>Set Download Limit...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="226"/>
<source>Set Global Download Limit...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="231"/>
<source>Set Global Upload Limit...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="236"/>
<source>Minimum Priority</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="244"/>
<source>Top Priority</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="252"/>
<source>Decrease Priority</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="260"/>
<source>Increase Priority</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="271"/>
<location filename="../gui/mainwindow.ui" line="274"/>
<source>Alternative Speed Limits</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="282"/>
<source>&Top Toolbar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="285"/>
<source>Display Top Toolbar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="293"/>
<source>S&peed in Title Bar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="296"/>
<source>Show Transfer Speed in Title Bar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="304"/>
<source>&RSS Reader</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="312"/>
<source>Search &Engine</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="317"/>
<source>L&ock qBittorrent</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="331"/>
<source>&Import Existing Torrent...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="334"/>
<source>Import Torrent...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="339"/>
<source>Do&nate!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="347"/>
<source>R&esume All</source>
<translation>Tout Dé&marrer</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="360"/>
<source>&Log</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="371"/>
<source>&Exit qBittorrent</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="379"/>
<source>&Suspend System</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="387"/>
<source>&Hibernate System</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="395"/>
<source>S&hutdown System</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="403"/>
<source>&Disabled</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="418"/>
<source>&Statistics</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="423"/>
<source>Check for Updates</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="426"/>
<source>Check for Program Updates</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="169"/>
<source>&About</source>
<translation>&À propos</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="156"/>
<source>Exit</source>
<translation>Quitter</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="182"/>
<source>&Pause</source>
<translation>Mettre en &pause</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="190"/>
<source>&Delete</source>
<translation>&Supprimer</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="352"/>
<source>P&ause All</source>
<translation>Tout &mettre en pause</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="145"/>
<source>&Add Torrent File...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="148"/>
<source>Open</source>
<translation>Ouvrir</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="153"/>
<source>E&xit</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="164"/>
<source>Options</source>
<translation>Options</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="177"/>
<source>Resume</source>
<translation>Démarrer</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="185"/>
<source>Pause</source>
<translation>Mettre en pause</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="193"/>
<source>Delete</source>
<translation>Supprimer</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="201"/>
<source>Open URL</source>
<translation>Ouvrir URL</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="221"/>
<source>&Documentation</source>
<translation>&Documentation</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="320"/>
<source>Lock</source>
<translation>Verrouiller</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="408"/>
<location filename="../gui/mainwindow.cpp" line="1293"/>
<source>Show</source>
<translation>Afficher</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1480"/>
<source>Check for program updates</source>
<translation>Vérifier la disponibilité de mises à jour du logiciel</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="323"/>
<source>Lock qBittorrent</source>
<translation>Verrouiller qBittorrent</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="198"/>
<source>Add Torrent &Link...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="342"/>
<source>If you like qBittorrent, please donate!</source>
<translation>Si vous aimez qBittorrent, faites un don !</translation>
</message>
<message>
<location filename="../gui/mainwindow.ui" line="363"/>
<location filename="../gui/mainwindow.cpp" line="1508"/>
<source>Execution Log</source>
<translation>Journal d'exécution</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="478"/>
<source>Clear the password</source>
<translation>Effacer le mot de passe</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="187"/>
<source>Filter torrent list...</source>
<translation>Filtrer la liste des torrents…</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="158"/>
<source>&Set Password</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="160"/>
<source>&Clear Password</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="205"/>
<source>Transfers</source>
<translation>Transferts</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="343"/>
<source>Torrent file association</source>
<translation>Association aux fichiers torrent</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="344"/>
<source>qBittorrent is not the default application to open torrent files or Magnet links.
Do you want to associate qBittorrent to torrent files and Magnet links?</source>
<translation>qBittorrent n'est pas l'application par défaut utilisée pour ouvrir les fichiers torrent ou les liens magnet.
Voulez-vous associer qBittorrent aux fichiers torrent et liens magnet ?</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="375"/>
<source>Icons Only</source>
<translation>Icônes seulement</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="377"/>
<source>Text Only</source>
<translation>Texte seulement</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="379"/>
<source>Text Alongside Icons</source>
<translation>Texte à côté des Icônes</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="381"/>
<source>Text Under Icons</source>
<translation>Texte sous les Icônes</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="383"/>
<source>Follow System Style</source>
<translation>Suivre le style du système</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="463"/>
<location filename="../gui/mainwindow.cpp" line="490"/>
<location filename="../gui/mainwindow.cpp" line="792"/>
<source>UI lock password</source>
<translation>Mot de passe de verrouillage</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="463"/>
<location filename="../gui/mainwindow.cpp" line="490"/>
<location filename="../gui/mainwindow.cpp" line="792"/>
<source>Please type the UI lock password:</source>
<translation>Veuillez entrer le mot de passe de verrouillage :</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="467"/>
<source>The password should contain at least 3 characters</source>
        <translation>Le mot de passe doit contenir au moins 3 caractères</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="472"/>
<source>Password update</source>
<translation>Mise à jour du mot de passe</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="472"/>
<source>The UI lock password has been successfully updated</source>
<translation>Le mot de passe de verrouillage a été mis à jour</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="478"/>
<source>Are you sure you want to clear the password?</source>
        <translation>Êtes-vous sûr de vouloir effacer le mot de passe ?</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="530"/>
<source>Search</source>
<translation>Recherche</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="541"/>
<source>Transfers (%1)</source>
<translation>Transferts (%1)</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="632"/>
<source>Error</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="632"/>
<source>Failed to add torrent: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="638"/>
<source>Download completion</source>
<translation>Fin du téléchargement</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="644"/>
<source>I/O Error</source>
<comment>i.e: Input/Output Error</comment>
<translation>Erreur E/S</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="705"/>
<source>Recursive download confirmation</source>
<translation>Confirmation pour téléchargement récursif</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="706"/>
<source>Yes</source>
<translation>Oui</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="707"/>
<source>No</source>
<translation>Non</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="708"/>
<source>Never</source>
<translation>Jamais</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="728"/>
<source>Global Upload Speed Limit</source>
<translation>Limite globale de la vitesse d'envoi</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="744"/>
<source>Global Download Speed Limit</source>
<translation>Limite globale de la vitesse de réception</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="894"/>
<source>&No</source>
<translation type="unfinished">&Non</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="895"/>
<source>&Yes</source>
<translation type="unfinished">&Oui</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="896"/>
<source>&Always Yes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1379"/>
<source>Python found in %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1394"/>
<source>Old Python Interpreter</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1464"/>
<source>qBittorrent Update Available</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1474"/>
<source>Already Using the Latest qBittorrent Version</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1404"/>
<source>Undetermined Python version</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="638"/>
<source>'%1' has finished downloading.</source>
<comment>e.g: xxx.avi has finished downloading.</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="644"/>
<source>An I/O error occurred for torrent '%1'.
Reason: %2</source>
<comment>e.g: An error occurred for torrent 'xxx.avi'.
Reason: disk is full.</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="705"/>
<source>The torrent '%1' contains torrent files, do you want to proceed with their download?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="720"/>
<source>Couldn't download file at URL '%1', reason: %2.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1394"/>
<source>Your Python version %1 is outdated. Please upgrade to latest version for search engines to work. Minimum requirement: 2.7.0/3.3.0.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1404"/>
<source>Couldn't determine your Python version (%1). Search engine disabled.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1415"/>
<location filename="../gui/mainwindow.cpp" line="1427"/>
<source>Missing Python Interpreter</source>
<translation>L’interpréteur Python est absent</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1416"/>
<source>Python is required to use the search engine but it does not seem to be installed.
Do you want to install it now?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1427"/>
<source>Python is required to use the search engine but it does not seem to be installed.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1465"/>
<source>A new version is available.
Update to version %1?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1475"/>
<source>No updates available.
You are already using the latest version.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1479"/>
<source>&Check for Updates</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1577"/>
<source>Checking for Updates...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1578"/>
<source>Already checking for program updates in the background</source>
<translation>Recherche de mises à jour déjà en cours en tâche de fond</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1593"/>
<source>Python found in '%1'</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1646"/>
<source>Download error</source>
<translation>Erreur de téléchargement</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1646"/>
<source>Python setup could not be downloaded, reason: %1.
Please install it manually.</source>
<translation>L’installateur Python ne peut pas être téléchargé pour la raison suivante : %1.
Veuillez l’installer manuellement.</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="467"/>
<location filename="../gui/mainwindow.cpp" line="806"/>
<source>Invalid password</source>
<translation>Mot de passe invalide</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="508"/>
<location filename="../gui/mainwindow.cpp" line="520"/>
<source>RSS (%1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="720"/>
<source>URL download error</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="806"/>
<source>The password is invalid</source>
<translation>Le mot de passe fourni est invalide</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1188"/>
<location filename="../gui/mainwindow.cpp" line="1195"/>
<source>DL speed: %1</source>
<comment>e.g: Download speed: 10 KiB/s</comment>
<translation>Vitesse de réception : %1</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1191"/>
<location filename="../gui/mainwindow.cpp" line="1197"/>
<source>UP speed: %1</source>
<comment>e.g: Upload speed: 10 KiB/s</comment>
<translation>Vitesse d'envoi : %1</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1202"/>
<source>[D: %1, U: %2] qBittorrent %3</source>
<comment>D = Download; U = Upload; %3 is qBittorrent version</comment>
<translation>[R : %1, E : %2] qBittorrent %3</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1293"/>
<source>Hide</source>
<translation>Cacher</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="891"/>
<source>Exiting qBittorrent</source>
<translation>Fermeture de qBittorrent</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="892"/>
<source>Some files are currently transferring.
Are you sure you want to quit qBittorrent?</source>
<translation>Certains fichiers sont en cours de transfert.
Êtes-vous sûr de vouloir quitter qBittorrent ?</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1033"/>
<source>Open Torrent Files</source>
<translation>Ouvrir fichiers torrent</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1034"/>
<source>Torrent Files</source>
<translation>Fichiers torrent</translation>
</message>
<message>
<location filename="../gui/mainwindow.cpp" line="1069"/>
<source>Options were saved successfully.</source>
<translation>Préférences sauvegardées avec succès.</translation>
</message>
</context>
<context>
<name>Net::DNSUpdater</name>
<message>
<location filename="../core/net/dnsupdater.cpp" line="200"/>
<source>Your dynamic DNS was successfully updated.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/dnsupdater.cpp" line="204"/>
<source>Dynamic DNS error: The service is temporarily unavailable, it will be retried in 30 minutes.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/dnsupdater.cpp" line="213"/>
<source>Dynamic DNS error: hostname supplied does not exist under specified account.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/dnsupdater.cpp" line="218"/>
<source>Dynamic DNS error: Invalid username/password.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/dnsupdater.cpp" line="223"/>
<source>Dynamic DNS error: qBittorrent was blacklisted by the service, please report a bug at http://bugs.qbittorrent.org.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/dnsupdater.cpp" line="229"/>
<source>Dynamic DNS error: %1 was returned by the service, please report a bug at http://bugs.qbittorrent.org.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/dnsupdater.cpp" line="235"/>
<source>Dynamic DNS error: Your username was blocked due to abuse.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/dnsupdater.cpp" line="256"/>
<source>Dynamic DNS error: supplied domain name is invalid.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/dnsupdater.cpp" line="267"/>
<source>Dynamic DNS error: supplied username is too short.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/dnsupdater.cpp" line="278"/>
<source>Dynamic DNS error: supplied password is too short.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>Net::DownloadHandler</name>
<message>
<location filename="../core/net/downloadhandler.cpp" line="104"/>
<source>I/O Error</source>
<translation type="unfinished">Erreur E/S</translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="117"/>
<source>The file size is %1. It exceeds the download limit of %2.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="186"/>
<source>Unexpected redirect to magnet URI.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>Net::GeoIPManager</name>
<message>
<location filename="../core/net/geoipmanager.cpp" line="104"/>
<location filename="../core/net/geoipmanager.cpp" line="432"/>
<source>GeoIP database loaded. Type: %1. Build time: %2.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="108"/>
<location filename="../core/net/geoipmanager.cpp" line="453"/>
<source>Couldn't load GeoIP database. Reason: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="141"/>
<location filename="../core/net/geoipmanager.cpp" line="398"/>
<source>N/A</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="142"/>
<source>Asia/Pacific Region</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="143"/>
<source>Europe</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="144"/>
<source>Andorra</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="145"/>
<source>United Arab Emirates</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="146"/>
<source>Afghanistan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="147"/>
<source>Antigua and Barbuda</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="148"/>
<source>Anguilla</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="149"/>
<source>Albania</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="150"/>
<source>Armenia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="151"/>
<source>Netherlands Antilles</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="152"/>
<source>Angola</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="153"/>
<source>Antarctica</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="154"/>
<source>Argentina</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="155"/>
<source>American Samoa</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="156"/>
<source>Austria</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="157"/>
<source>Australia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="158"/>
<source>Aruba</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="159"/>
<source>Azerbaijan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="160"/>
<source>Bosnia and Herzegovina</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="161"/>
<source>Barbados</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="162"/>
<source>Bangladesh</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="163"/>
<source>Belgium</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="164"/>
<source>Burkina Faso</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="165"/>
<source>Bulgaria</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="166"/>
<source>Bahrain</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="167"/>
<source>Burundi</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="168"/>
<source>Benin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="169"/>
<source>Bermuda</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="170"/>
<source>Brunei Darussalam</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="171"/>
<source>Bolivia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="172"/>
<source>Brazil</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="173"/>
<source>Bahamas</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="174"/>
<source>Bhutan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="175"/>
<source>Bouvet Island</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="176"/>
<source>Botswana</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="177"/>
<source>Belarus</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="178"/>
<source>Belize</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="179"/>
<source>Canada</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="180"/>
<source>Cocos (Keeling) Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="181"/>
<source>Congo, The Democratic Republic of the</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="182"/>
<source>Central African Republic</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="183"/>
<source>Congo</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="184"/>
<source>Switzerland</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="185"/>
<source>Cote D'Ivoire</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="186"/>
<source>Cook Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="187"/>
<source>Chile</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="188"/>
<source>Cameroon</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="189"/>
<source>China</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="190"/>
<source>Colombia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="191"/>
<source>Costa Rica</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="192"/>
<source>Cuba</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="193"/>
<source>Cape Verde</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="194"/>
<source>Christmas Island</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="195"/>
<source>Cyprus</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="196"/>
<source>Czech Republic</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="197"/>
<source>Germany</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="198"/>
<source>Djibouti</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="199"/>
<source>Denmark</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="200"/>
<source>Dominica</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="201"/>
<source>Dominican Republic</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="202"/>
<source>Algeria</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="203"/>
<source>Ecuador</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="204"/>
<source>Estonia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="205"/>
<source>Egypt</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="206"/>
<source>Western Sahara</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="207"/>
<source>Eritrea</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="208"/>
<source>Spain</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="209"/>
<source>Ethiopia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="210"/>
<source>Finland</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="211"/>
<source>Fiji</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="212"/>
<source>Falkland Islands (Malvinas)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="213"/>
<source>Micronesia, Federated States of</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="214"/>
<source>Faroe Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="215"/>
<source>France</source>
<translation type="unfinished">France</translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="216"/>
<source>France, Metropolitan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="217"/>
<source>Gabon</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="218"/>
<source>United Kingdom</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="219"/>
<source>Grenada</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="220"/>
<source>Georgia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="221"/>
<source>French Guiana</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="222"/>
<source>Ghana</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="223"/>
<source>Gibraltar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="224"/>
<source>Greenland</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="225"/>
<source>Gambia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="226"/>
<source>Guinea</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="227"/>
<source>Guadeloupe</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="228"/>
<source>Equatorial Guinea</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="229"/>
<source>Greece</source>
<translation type="unfinished">Grèce</translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="230"/>
<source>South Georgia and the South Sandwich Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="231"/>
<source>Guatemala</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="232"/>
<source>Guam</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="233"/>
<source>Guinea-Bissau</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="234"/>
<source>Guyana</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="235"/>
<source>Hong Kong</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="236"/>
<source>Heard Island and McDonald Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="237"/>
<source>Honduras</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="238"/>
<source>Croatia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="239"/>
<source>Haiti</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="240"/>
<source>Hungary</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="241"/>
<source>Indonesia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="242"/>
<source>Ireland</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="243"/>
<source>Israel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="244"/>
<source>India</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="245"/>
<source>British Indian Ocean Territory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="246"/>
<source>Iraq</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="247"/>
<source>Iran, Islamic Republic of</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="248"/>
<source>Iceland</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="249"/>
<source>Italy</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="250"/>
<source>Jamaica</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="251"/>
<source>Jordan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="252"/>
<source>Japan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="253"/>
<source>Kenya</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="254"/>
<source>Kyrgyzstan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="255"/>
<source>Cambodia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="256"/>
<source>Kiribati</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="257"/>
<source>Comoros</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="258"/>
<source>Saint Kitts and Nevis</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="259"/>
<source>Korea, Democratic People's Republic of</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="260"/>
<source>Korea, Republic of</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="261"/>
<source>Kuwait</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="262"/>
<source>Cayman Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="263"/>
<source>Kazakhstan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="264"/>
<source>Lao People's Democratic Republic</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="265"/>
<source>Lebanon</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="266"/>
<source>Saint Lucia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="267"/>
<source>Liechtenstein</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="268"/>
<source>Sri Lanka</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="269"/>
<source>Liberia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="270"/>
<source>Lesotho</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="271"/>
<source>Lithuania</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="272"/>
<source>Luxembourg</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="273"/>
<source>Latvia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="274"/>
<source>Libyan Arab Jamahiriya</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="275"/>
<source>Morocco</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="276"/>
<source>Monaco</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="277"/>
<source>Moldova, Republic of</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="278"/>
<source>Madagascar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="279"/>
<source>Marshall Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="280"/>
<source>Macedonia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="281"/>
<source>Mali</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="282"/>
<source>Myanmar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="283"/>
<source>Mongolia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="284"/>
<source>Macau</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="285"/>
<source>Northern Mariana Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="286"/>
<source>Martinique</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="287"/>
<source>Mauritania</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="288"/>
<source>Montserrat</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="289"/>
<source>Malta</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="290"/>
<source>Mauritius</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="291"/>
<source>Maldives</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="292"/>
<source>Malawi</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="293"/>
<source>Mexico</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="294"/>
<source>Malaysia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="295"/>
<source>Mozambique</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="296"/>
<source>Namibia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="297"/>
<source>New Caledonia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="298"/>
<source>Niger</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="299"/>
<source>Norfolk Island</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="300"/>
<source>Nigeria</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="301"/>
<source>Nicaragua</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="302"/>
<source>Netherlands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="303"/>
<source>Norway</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="304"/>
<source>Nepal</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="305"/>
<source>Nauru</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="306"/>
<source>Niue</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="307"/>
<source>New Zealand</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="308"/>
<source>Oman</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="309"/>
<source>Panama</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="310"/>
<source>Peru</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="311"/>
<source>French Polynesia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="312"/>
<source>Papua New Guinea</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="313"/>
<source>Philippines</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="314"/>
<source>Pakistan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="315"/>
<source>Poland</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="316"/>
<source>Saint Pierre and Miquelon</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="317"/>
<source>Pitcairn Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="318"/>
<source>Puerto Rico</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="319"/>
<source>Palestinian Territory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="320"/>
<source>Portugal</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="321"/>
<source>Palau</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="322"/>
<source>Paraguay</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="323"/>
<source>Qatar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="324"/>
<source>Reunion</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="325"/>
<source>Romania</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="326"/>
<source>Russian Federation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="327"/>
<source>Rwanda</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="328"/>
<source>Saudi Arabia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="329"/>
<source>Solomon Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="330"/>
<source>Seychelles</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="331"/>
<source>Sudan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="332"/>
<source>Sweden</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="333"/>
<source>Singapore</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="334"/>
<source>Saint Helena</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="335"/>
<source>Slovenia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="336"/>
<source>Svalbard and Jan Mayen</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="337"/>
<source>Slovakia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="338"/>
<source>Sierra Leone</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="339"/>
<source>San Marino</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="340"/>
<source>Senegal</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="341"/>
<source>Somalia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="342"/>
<source>Suriname</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="343"/>
<source>Sao Tome and Principe</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="344"/>
<source>El Salvador</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="345"/>
<source>Syrian Arab Republic</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="346"/>
<source>Swaziland</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="347"/>
<source>Turks and Caicos Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="348"/>
<source>Chad</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="349"/>
<source>French Southern Territories</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="350"/>
<source>Togo</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="351"/>
<source>Thailand</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="352"/>
<source>Tajikistan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="353"/>
<source>Tokelau</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="354"/>
<source>Turkmenistan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="355"/>
<source>Tunisia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="356"/>
<source>Tonga</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="357"/>
<source>Timor-Leste</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="358"/>
<source>Turkey</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="359"/>
<source>Trinidad and Tobago</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="360"/>
<source>Tuvalu</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="361"/>
<source>Taiwan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="362"/>
<source>Tanzania, United Republic of</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="363"/>
<source>Ukraine</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="364"/>
<source>Uganda</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="365"/>
<source>United States Minor Outlying Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="366"/>
<source>United States</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="367"/>
<source>Uruguay</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="368"/>
<source>Uzbekistan</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="369"/>
<source>Holy See (Vatican City State)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="370"/>
<source>Saint Vincent and the Grenadines</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="371"/>
<source>Venezuela</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="372"/>
<source>Virgin Islands, British</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="373"/>
<source>Virgin Islands, U.S.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="374"/>
<source>Vietnam</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="375"/>
<source>Vanuatu</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="376"/>
<source>Wallis and Futuna</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="377"/>
<source>Samoa</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="378"/>
<source>Yemen</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="379"/>
<source>Mayotte</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="380"/>
<source>Serbia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="381"/>
<source>South Africa</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="382"/>
<source>Zambia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="383"/>
<source>Montenegro</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="384"/>
<source>Zimbabwe</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="385"/>
<source>Anonymous Proxy</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="386"/>
<source>Satellite Provider</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="387"/>
<source>Other</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="388"/>
<source>Aland Islands</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="389"/>
<source>Guernsey</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="390"/>
<source>Isle of Man</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="391"/>
<source>Jersey</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="392"/>
<source>Saint Barthelemy</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="393"/>
<source>Saint Martin</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="421"/>
<source>Could not uncompress GeoIP database file.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="442"/>
<source>Couldn't save downloaded GeoIP database file.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="445"/>
<source>Successfully updated GeoIP database.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/geoipmanager.cpp" line="460"/>
<source>Couldn't download GeoIP database file. Reason: %1</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>Net::PortForwarder</name>
<message>
<location filename="../core/net/portforwarder.cpp" line="110"/>
<source>UPnP / NAT-PMP support [ON]</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/portforwarder.cpp" line="119"/>
<source>UPnP / NAT-PMP support [OFF]</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>Net::Smtp</name>
<message>
<location filename="../core/net/smtp.cpp" line="501"/>
<source>Email Notification Error:</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PeerListWidget</name>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="68"/>
<source>IP</source>
<translation>IP</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="69"/>
<source>Port</source>
<translation>Port</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="70"/>
<source>Flags</source>
<translation>Indicateurs</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="71"/>
<source>Connection</source>
<translation>Connexion</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="72"/>
<source>Client</source>
<comment>i.e.: Client application</comment>
<translation>Logiciel</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="73"/>
<source>Progress</source>
<comment>i.e: % downloaded</comment>
<translation>Progression</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="74"/>
<source>Down Speed</source>
<comment>i.e: Download speed</comment>
<translation>Vitesse DL</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="75"/>
<source>Up Speed</source>
<comment>i.e: Upload speed</comment>
<translation>Vitesse UP</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="76"/>
<source>Downloaded</source>
<comment>i.e: total data downloaded</comment>
<translation>Téléchargé</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="77"/>
<source>Uploaded</source>
<comment>i.e: total data uploaded</comment>
<translation>Envoyé</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="78"/>
<source>Relevance</source>
<comment>i.e: How relevant this peer is to us. How many pieces it has that we don't.</comment>
<translation>Pertinence</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="166"/>
<source>Add a new peer...</source>
<translation>Ajouter un nouveau pair…</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="172"/>
<source>Copy selected</source>
        <translation>Copier la sélection</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="174"/>
<location filename="../gui/properties/peerlistwidget.cpp" line="212"/>
<source>Ban peer permanently</source>
        <translation>Bloquer le pair définitivement</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="186"/>
<source>Manually adding peer '%1'...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="190"/>
<source>The peer '%1' could not be added to this torrent.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="223"/>
<source>Manually banning peer '%1'...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="194"/>
<location filename="../gui/properties/peerlistwidget.cpp" line="196"/>
<source>Peer addition</source>
<translation>Ajout d'un pair</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="194"/>
<source>Some peers could not be added. Check the Log for details.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="196"/>
<source>The peers were added to this torrent.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="212"/>
<source>Are you sure you want to ban permanently the selected peers?</source>
        <translation>Êtes-vous sûr de vouloir bloquer définitivement les pairs sélectionnés ?</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="213"/>
<source>&Yes</source>
<translation>&Oui</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="213"/>
<source>&No</source>
<translation>&Non</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="388"/>
<source>interested(local) and choked(peer)</source>
<translation>intéressé (local) et engorgé (pair)</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="394"/>
<source>interested(local) and unchoked(peer)</source>
<translation>intéressé (local) et non engorgé (pair)</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="403"/>
<source>interested(peer) and choked(local)</source>
<translation>intéressé (pair) et engorgé (local)</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="409"/>
<source>interested(peer) and unchoked(local)</source>
<translation>intéressé (pair) et non engorgé (local)</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="417"/>
<source>optimistic unchoke</source>
        <translation>désengorgement optimiste</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="424"/>
<source>peer snubbed</source>
<translation>pair évité</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="431"/>
<source>incoming connection</source>
<translation>connexion entrante</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="438"/>
<source>not interested(local) and unchoked(peer)</source>
<translation>non intéressé (local) et non engorgé (pair)</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="445"/>
<source>not interested(peer) and unchoked(local)</source>
<translation>non intéressé (pair) et non engorgé (local)</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="452"/>
<source>peer from PEX</source>
<translation>pair issu de PEX</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="459"/>
<source>peer from DHT</source>
<translation>pair issu du DHT</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="466"/>
<source>encrypted traffic</source>
<translation>trafic chiffré</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="473"/>
<source>encrypted handshake</source>
<translation>poignée de main chiffrée</translation>
</message>
<message>
<location filename="../gui/properties/peerlistwidget.cpp" line="488"/>
<source>peer from LSD</source>
<translation>pair issu de LSD</translation>
</message>
</context>
<context>
<name>PeersAdditionDlg</name>
<message>
<location filename="../gui/properties/peersadditiondlg.cpp" line="58"/>
<source>No peer entered</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/peersadditiondlg.cpp" line="59"/>
<source>Please type at least one peer.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/peersadditiondlg.cpp" line="69"/>
<source>Invalid peer</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/peersadditiondlg.cpp" line="70"/>
<source>The peer '%1' is invalid.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PieceAvailabilityBar</name>
<message>
<location filename="../gui/properties/pieceavailabilitybar.cpp" line="39"/>
<source>White: Unavailable pieces</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/pieceavailabilitybar.cpp" line="39"/>
<source>Blue: Available pieces</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>Preferences</name>
<message>
<location filename="../gui/options.ui" line="69"/>
<source>Downloads</source>
<translation>Téléchargements</translation>
</message>
<message>
<location filename="../gui/options.ui" line="80"/>
<source>Connection</source>
<translation>Connexion</translation>
</message>
<message>
<location filename="../gui/options.ui" line="91"/>
<source>Speed</source>
<translation>Vitesse</translation>
</message>
<message>
<location filename="../gui/options.ui" line="113"/>
<source>Web UI</source>
<translation>Interface web</translation>
</message>
<message>
<location filename="../gui/options.ui" line="124"/>
<source>Advanced</source>
<translation>Avancé</translation>
</message>
<message>
<location filename="../gui/options.ui" line="209"/>
<source>(Requires restart)</source>
<translation>(Redémarrage nécessaire)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="253"/>
<source>Use alternating row colors</source>
<extracomment>In transfer list, one every two rows will have grey background.</extracomment>
<translation>Alterner la couleur des lignes</translation>
</message>
<message>
<location filename="../gui/options.ui" line="295"/>
<location filename="../gui/options.ui" line="321"/>
<source>Start / Stop Torrent</source>
        <translation>Démarrer / Arrêter le torrent</translation>
</message>
<message>
<location filename="../gui/options.ui" line="305"/>
<location filename="../gui/options.ui" line="331"/>
<source>No action</source>
<translation>Aucune action</translation>
</message>
<message>
<location filename="../gui/options.ui" line="701"/>
<source>Append .!qB extension to incomplete files</source>
<translation>Ajouter l'extension .!qB aux noms des fichiers incomplets</translation>
</message>
<message>
<location filename="../gui/options.ui" line="804"/>
<source>Copy .torrent files to:</source>
<translation>Copier les fichiers .torrent dans :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1121"/>
<source>Connections Limits</source>
<translation>Limites de connexions</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1274"/>
<source>Proxy Server</source>
<translation>Serveur mandataire (proxy)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1577"/>
<source>Global Rate Limits</source>
<translation>Limites de vitesse globales</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1879"/>
<source>Apply rate limit to transport overhead</source>
<translation>Appliquer les limites de vitesse au surplus généré par le protocole</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1672"/>
<source>Schedule the use of alternative rate limits</source>
<translation>Planifier l'utilisation des vitesses limites alternatives</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1684"/>
<source>From:</source>
<extracomment>from (time1 to time2)</extracomment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1708"/>
<source>To:</source>
<extracomment>time1 to time2</extracomment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1993"/>
<source>Enable Local Peer Discovery to find more peers</source>
        <translation>Activer la découverte de pairs sur le réseau local pour en trouver davantage</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2005"/>
<source>Encryption mode:</source>
<translation>Mode de chiffrement :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2013"/>
<source>Prefer encryption</source>
<translation>Chiffrement préféré</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2018"/>
<source>Require encryption</source>
<translation>Chiffrement requis</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2023"/>
<source>Disable encryption</source>
<translation>Chiffrement désactivé</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2058"/>
<source> (<a href="http://github.com/qbittorrent/qBittorrent/wiki/Anonymous-Mode">More information</a>)</source>
        <translation> (<a href="http://github.com/qbittorrent/qBittorrent/wiki/Anonymous-Mode">Plus d'informations</a>)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2101"/>
<source>Maximum active downloads:</source>
<translation>Nombre maximum de téléchargements actifs :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2121"/>
<source>Maximum active uploads:</source>
<translation>Nombre maximum d'envois actifs :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2141"/>
<source>Maximum active torrents:</source>
<translation>Nombre maximum de torrents actifs :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="537"/>
<source>When adding a torrent</source>
<translation>À l'ajout d'un torrent</translation>
</message>
<message>
<location filename="../gui/options.ui" line="58"/>
<source>Behavior</source>
<translation>Comportement</translation>
</message>
<message>
<location filename="../gui/options.ui" line="173"/>
<source>Language</source>
<translation>Langue</translation>
</message>
<message>
<location filename="../gui/options.ui" line="553"/>
<source>Display torrent content and some options</source>
<translation>Afficher le contenu du torrent et quelques paramètres</translation>
</message>
<message>
<location filename="../gui/options.ui" line="994"/>
<source>Run external program on torrent completion</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1057"/>
<source>Port used for incoming connections:</source>
<translation>Port pour les connexions entrantes :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1077"/>
<source>Random</source>
<translation>Aléatoire</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1127"/>
<source>Global maximum number of connections:</source>
<translation>Nombre maximum global de connexions :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1153"/>
<source>Maximum number of connections per torrent:</source>
<translation>Nombre maximum de connexions par torrent :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1176"/>
<source>Maximum number of upload slots per torrent:</source>
<translation>Nombre maximum d'emplacements d'envoi par torrent :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1599"/>
<location filename="../gui/options.ui" line="1790"/>
<source>Upload:</source>
<translation>Envoi :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1643"/>
<location filename="../gui/options.ui" line="1797"/>
<source>Download:</source>
<translation>Réception :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1606"/>
<location filename="../gui/options.ui" line="1629"/>
<location filename="../gui/options.ui" line="1836"/>
<location filename="../gui/options.ui" line="1843"/>
<source>KiB/s</source>
<translation>Kio/s</translation>
</message>
<message>
<location filename="../gui/options.ui" line="771"/>
<source>Remove folder</source>
<translation>Supprimer le dossier</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1749"/>
<source>Every day</source>
<translation>Tous les jours</translation>
</message>
<message utf8="true">
<location filename="../gui/options.ui" line="1977"/>
<source>Exchange peers with compatible Bittorrent clients (µTorrent, Vuze, ...)</source>
        <translation>Échanger des pairs avec les clients BitTorrent compatibles (µTorrent, Vuze, …)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1316"/>
<source>Host:</source>
<translation>Hôte :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1295"/>
<source>SOCKS4</source>
<translation>SOCKS4</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1282"/>
<source>Type:</source>
<translation>Type :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="14"/>
<source>Options</source>
<translation>Options</translation>
</message>
<message>
<location filename="../gui/options.ui" line="269"/>
<source>Action on double-click</source>
<translation>Action du double-clic</translation>
</message>
<message>
<location filename="../gui/options.ui" line="278"/>
<source>Downloading torrents:</source>
<translation>Torrents incomplets :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="300"/>
<location filename="../gui/options.ui" line="326"/>
<source>Open destination folder</source>
<translation>Ouvrir le répertoire de destination</translation>
</message>
<message>
<location filename="../gui/options.ui" line="313"/>
<source>Completed torrents:</source>
<translation>Torrents complets :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="345"/>
<source>Desktop</source>
<translation>Bureau</translation>
</message>
<message>
<location filename="../gui/options.ui" line="358"/>
<source>Show splash screen on start up</source>
<translation>Afficher l'écran de démarrage</translation>
</message>
<message>
<location filename="../gui/options.ui" line="368"/>
<source>Start qBittorrent minimized</source>
<translation>Démarrer qBittorrent avec la fenêtre réduite</translation>
</message>
<message>
<location filename="../gui/options.ui" line="394"/>
<source>Minimize qBittorrent to notification area</source>
<translation>Réduire qBittorrent dans la zone de notification</translation>
</message>
<message>
<location filename="../gui/options.ui" line="404"/>
<source>Close qBittorrent to notification area</source>
<comment>i.e: The systray tray icon will still be visible when closing the main window.</comment>
<translation>Conserver dans la zone de notification à la fermeture</translation>
</message>
<message>
<location filename="../gui/options.ui" line="413"/>
<source>Tray icon style:</source>
<translation>Style de l'icône :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="421"/>
<source>Normal</source>
<translation>Normale</translation>
</message>
<message>
<location filename="../gui/options.ui" line="426"/>
<source>Monochrome (Dark theme)</source>
<translation>Monochrome (thème foncé)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="431"/>
<source>Monochrome (Light theme)</source>
<translation>Monochrome (thème clair)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="181"/>
<source>User Interface Language:</source>
<translation>Langue de l'interface utilisateur :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="237"/>
<source>Transfer List</source>
<translation>Liste des transferts</translation>
</message>
<message>
<location filename="../gui/options.ui" line="243"/>
<source>Confirm when deleting torrents</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="351"/>
<source>Start qBittorrent on Windows start up</source>
<translation>Démarrer qBittorrent au lancement de Windows</translation>
</message>
<message>
<location filename="../gui/options.ui" line="375"/>
<source>Confirmation on exit when torrents are active</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="385"/>
<source>Show qBittorrent in notification area</source>
<translation>Afficher l'icône de qBittorrent dans la zone de notification</translation>
</message>
<message>
<location filename="../gui/options.ui" line="444"/>
<source>File association</source>
<translation>Association des fichiers</translation>
</message>
<message>
<location filename="../gui/options.ui" line="450"/>
<source>Use qBittorrent for .torrent files</source>
<translation>Utiliser qBittorrent pour les fichiers .torrent</translation>
</message>
<message>
<location filename="../gui/options.ui" line="457"/>
<source>Use qBittorrent for magnet links</source>
<translation>Utiliser qBittorrent pour les liens magnet</translation>
</message>
<message>
<location filename="../gui/options.ui" line="470"/>
<source>Power Management</source>
<translation>Gestion de l'énergie</translation>
</message>
<message>
<location filename="../gui/options.ui" line="476"/>
<source>Inhibit system sleep when torrents are active</source>
<translation>Empêcher la mise en veille lorsque des torrents sont actifs</translation>
</message>
<message>
<location filename="../gui/options.ui" line="546"/>
<source>Do not start the download automatically</source>
<comment>The torrent will be added to download list in pause state</comment>
<translation>Ne pas démarrer le téléchargement automatiquement</translation>
</message>
<message>
<location filename="../gui/options.ui" line="562"/>
<source>Bring torrent dialog to the front</source>
        <translation>Mettre la boîte de dialogue du torrent au premier plan</translation>
</message>
<message>
<location filename="../gui/options.ui" line="584"/>
<source>Hard Disk</source>
<translation>Disque dur</translation>
</message>
<message>
<location filename="../gui/options.ui" line="590"/>
<source>Save files to location:</source>
<translation>Sauvegarder les fichiers vers :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="638"/>
<source>Append the label of the torrent to the save path</source>
<translation>Ajouter à la fin du chemin la catégorie du torrent</translation>
</message>
<message>
<location filename="../gui/options.ui" line="648"/>
<source>Pre-allocate disk space for all files</source>
<translation>Pré-allouer l'espace disque pour tous les fichiers</translation>
</message>
<message>
<location filename="../gui/options.ui" line="655"/>
<source>Keep incomplete torrents in:</source>
<translation>Conserver les torrents incomplets dans :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="708"/>
<source>Automatically add torrents from:</source>
<translation>Ajouter automatiquement les torrents présents dans :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="761"/>
<source>Add folder...</source>
<translation>Ajouter un dossier…</translation>
</message>
<message>
<location filename="../gui/options.ui" line="853"/>
<source>Copy .torrent files for finished downloads to:</source>
<translation>Copier les fichiers .torrent des téléchargements terminés dans :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="909"/>
<source>Email notification upon download completion</source>
<translation>Notification par courriel de fin de téléchargement</translation>
</message>
<message>
<location filename="../gui/options.ui" line="923"/>
<source>Destination email:</source>
<translation>Courriel de destination :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="933"/>
<source>SMTP server:</source>
<translation>Serveur SMTP :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="982"/>
<source>This server requires a secure connection (SSL)</source>
<translation>Ce serveur nécessite une connexion sécurisée (SSL)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1049"/>
<source>Listening Port</source>
<translation>Port d'écoute</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1099"/>
<source>Use UPnP / NAT-PMP port forwarding from my router</source>
<translation>Utiliser la redirection de port sur mon routeur via UPnP / NAT-PMP</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1109"/>
<source>Use different port on each startup</source>
<translation>Utiliser un port différent à chaque démarrage</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1235"/>
<source>Global maximum number of upload slots:</source>
<translation>Nombre maximum global d'emplacements d'envoi :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1370"/>
<source>Otherwise, the proxy server is only used for tracker connections</source>
<translation>Dans le cas contraire, le proxy sera uniquement utilisé pour contacter les trackers</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1373"/>
<source>Use proxy for peer connections</source>
<translation>Utiliser le serveur mandataire pour se connecter aux utilisateurs (pairs)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1380"/>
<source>Disable connections not supported by proxies</source>
        <translation>Désactiver les connexions non supportées par les proxys</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1393"/>
<source>Use proxy only for torrents</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1390"/>
<source>RSS feeds, search engine, software updates or anything else other than torrent transfers and related operations (such as peer exchanges) will use a direct connection</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1462"/>
<source>Info: The password is saved unencrypted</source>
<translation>Information : le mot de passe est sauvegardé en clair</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1475"/>
<source>IP Filtering</source>
<translation>Filtrage IP</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1516"/>
<source>Reload the filter</source>
<translation>Recharger le filtre</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1532"/>
<source>Apply to trackers</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1872"/>
<source>Apply rate limit to peers on LAN</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1735"/>
<source>When:</source>
<translation>Quand :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1754"/>
<source>Weekdays</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1759"/>
<source>Weekends</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1866"/>
<source>Rate Limits Settings</source>
<translation type="unfinished"></translation>
</message>
<message utf8="true">
<location filename="../gui/options.ui" line="1886"/>
<source>Enable µTP protocol</source>
<translation type="unfinished"></translation>
</message>
<message utf8="true">
<location filename="../gui/options.ui" line="1893"/>
<source>Apply rate limit to µTP protocol</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="1961"/>
<source>Privacy</source>
<translation>Vie privée</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1967"/>
<source>Enable DHT (decentralized network) to find more peers</source>
<translation>Activer le DHT (réseau décentralisé) pour trouver plus de pairs</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1980"/>
<source>Enable Peer Exchange (PeX) to find more peers</source>
<translation>Activer l'échange de pairs (PeX) avec les autres utilisateurs</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1990"/>
<source>Look for peers on your local network</source>
<translation>Rechercher des pairs sur votre réseau local</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2048"/>
<source>Enable when using a proxy or a VPN connection</source>
        <translation>Activer lorsque vous utilisez une connexion par proxy ou VPN</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2051"/>
<source>Enable anonymous mode</source>
<translation>Activer le mode anonyme</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2200"/>
<source>Do not count slow torrents in these limits</source>
<translation>Ne pas compter les torrents lents dans ces limites</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2221"/>
<source>Seed torrents until their ratio reaches</source>
<translation>Partager les torrents jusqu'à un ratio de</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2250"/>
<source>then</source>
<translation>puis</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2261"/>
<source>Pause them</source>
<translation>Les mettre en pause</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2266"/>
<source>Remove them</source>
<translation>Les supprimer</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2285"/>
<source>Automatically add these trackers to new downloads:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="2404"/>
<source>Use UPnP / NAT-PMP to forward the port from my router</source>
<translation>Utiliser la redirection de port sur mon routeur via UPnP / NAT-PMP</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2414"/>
<source>Use HTTPS instead of HTTP</source>
<translation>Utiliser HTTPS au lieu de HTTP</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2457"/>
<source>Import SSL Certificate</source>
<translation>Importer un certificat SSL</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2510"/>
<source>Import SSL Key</source>
<translation>Importer une clé SSL</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2445"/>
<source>Certificate:</source>
<translation>Certificat :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1666"/>
<source>Alternative Rate Limits</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options.ui" line="2498"/>
<source>Key:</source>
<translation>Clé :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2532"/>
<source><a href=http://httpd.apache.org/docs/2.2/ssl/ssl_faq.html#aboutcerts>Information about certificates</a></source>
<translation><a href=http://httpd.apache.org/docs/2.2/ssl/ssl_faq.html#aboutcerts>Plus d'information sur les certificats</a></translation>
</message>
<message>
<location filename="../gui/options.ui" line="2577"/>
<source>Bypass authentication for localhost</source>
<translation>Contourner l'authentification pour localhost</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2601"/>
<source>Update my dynamic domain name</source>
<translation>Mettre à jour mon nom de domaine dynamique</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2613"/>
<source>Service:</source>
<translation>Service :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2636"/>
<source>Register</source>
<translation>Créer un compte</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2645"/>
<source>Domain name:</source>
<translation>Nom de domaine :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1290"/>
<source>(None)</source>
<translation>(Aucun)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="102"/>
<source>BitTorrent</source>
<translation>BitTorrent</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1305"/>
<source>HTTP</source>
<translation>HTTP</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1342"/>
<location filename="../gui/options.ui" line="2369"/>
<source>Port:</source>
<translation>Port :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="943"/>
<location filename="../gui/options.ui" line="1406"/>
<location filename="../gui/options.ui" line="2545"/>
<source>Authentication</source>
<translation>Authentification</translation>
</message>
<message>
<location filename="../gui/options.ui" line="955"/>
<location filename="../gui/options.ui" line="1420"/>
<location filename="../gui/options.ui" line="2584"/>
<location filename="../gui/options.ui" line="2659"/>
<source>Username:</source>
<translation>Nom d'utilisateur :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="965"/>
<location filename="../gui/options.ui" line="1440"/>
<location filename="../gui/options.ui" line="2591"/>
<location filename="../gui/options.ui" line="2673"/>
<source>Password:</source>
<translation>Mot de passe :</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2086"/>
<source>Torrent Queueing</source>
<translation>Priorisation des torrents</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2210"/>
<source>Share Ratio Limiting</source>
<translation>Limitation du ratio de partage</translation>
</message>
<message>
<location filename="../gui/options.ui" line="2355"/>
<source>Enable Web User Interface (Remote control)</source>
<translation>Activer l'interface web (contrôle distant)</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1300"/>
<source>SOCKS5</source>
<translation>SOCKS5</translation>
</message>
<message>
<location filename="../gui/options.ui" line="1487"/>
<source>Filter path (.dat, .p2p, .p2b):</source>
<translation>Chemin du filtre (.dat, .p2p, .p2b) :</translation>
</message>
<message>
<location filename="../core/preferences.cpp" line="79"/>
<source>Detected unclean program exit. Using fallback file to restore settings.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/preferences.cpp" line="174"/>
<source>An access error occurred while trying to write the configuration file.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/preferences.cpp" line="176"/>
<source>A format error occurred while trying to write the configuration file.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PreviewSelect</name>
<message>
<location filename="../gui/previewselect.cpp" line="54"/>
<source>Name</source>
<translation>Nom</translation>
</message>
<message>
<location filename="../gui/previewselect.cpp" line="55"/>
<source>Size</source>
<translation>Taille</translation>
</message>
<message>
<location filename="../gui/previewselect.cpp" line="56"/>
<source>Progress</source>
<translation>Progression</translation>
</message>
<message>
<location filename="../gui/previewselect.cpp" line="90"/>
<location filename="../gui/previewselect.cpp" line="127"/>
<source>Preview impossible</source>
<translation>Prévisualisation impossible</translation>
</message>
<message>
<location filename="../gui/previewselect.cpp" line="90"/>
<location filename="../gui/previewselect.cpp" line="127"/>
<source>Sorry, we can't preview this file</source>
<translation>Désolé, il est impossible de prévisualiser ce fichier</translation>
</message>
</context>
<context>
<name>PropListDelegate</name>
<message>
<location filename="../gui/properties/proplistdelegate.cpp" line="106"/>
<source>Not downloaded</source>
<translation>Non téléchargé</translation>
</message>
<message>
<location filename="../gui/properties/proplistdelegate.cpp" line="115"/>
<location filename="../gui/properties/proplistdelegate.cpp" line="162"/>
<source>Normal</source>
<comment>Normal (priority)</comment>
<translation>Normale</translation>
</message>
<message>
<location filename="../gui/properties/proplistdelegate.cpp" line="109"/>
<location filename="../gui/properties/proplistdelegate.cpp" line="163"/>
<source>High</source>
<comment>High (priority)</comment>
<translation>Haute</translation>
</message>
<message>
<location filename="../gui/properties/proplistdelegate.cpp" line="103"/>
<source>Mixed</source>
<comment>Mixed (priorities</comment>
<translation>Mixtes</translation>
</message>
<message>
<location filename="../gui/properties/proplistdelegate.cpp" line="112"/>
<location filename="../gui/properties/proplistdelegate.cpp" line="164"/>
<source>Maximum</source>
<comment>Maximum (priority)</comment>
<translation>Maximale</translation>
</message>
</context>
<context>
<name>PropTabBar</name>
<message>
<location filename="../gui/properties/proptabbar.cpp" line="46"/>
<source>General</source>
<translation>Général</translation>
</message>
<message>
<location filename="../gui/properties/proptabbar.cpp" line="51"/>
<source>Trackers</source>
<translation>Trackers</translation>
</message>
<message>
<location filename="../gui/properties/proptabbar.cpp" line="55"/>
<source>Peers</source>
<translation>Pairs</translation>
</message>
<message>
<location filename="../gui/properties/proptabbar.cpp" line="59"/>
<source>HTTP Sources</source>
<translation>Sources HTTP</translation>
</message>
<message>
<location filename="../gui/properties/proptabbar.cpp" line="63"/>
<source>Content</source>
<translation>Contenu</translation>
</message>
<message>
<location filename="../gui/properties/proptabbar.cpp" line="69"/>
<source>Speed</source>
<translation type="unfinished">Vitesse</translation>
</message>
</context>
<context>
<name>PropertiesWidget</name>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="336"/>
<source>Downloaded:</source>
<translation>Téléchargé :</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="113"/>
<source>Availability:</source>
<translation>Disponibilité :</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="84"/>
<source>Progress:</source>
<translation type="unfinished">Progression :</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="160"/>
<source>Transfer</source>
<translation>Transfert</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="552"/>
<source>Time Active:</source>
<extracomment>Time (duration) the torrent is active (not paused)</extracomment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="581"/>
<source>ETA:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="510"/>
<source>Uploaded:</source>
<translation>Envoyé :</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="439"/>
<source>Seeds:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="455"/>
<source>Download Speed:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="191"/>
<source>Upload Speed:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="220"/>
<source>Peers:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="278"/>
<source>Download Limit:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="352"/>
<source>Upload Limit:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="597"/>
<source>Wasted:</source>
<translation>Gaspillé :</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="236"/>
<source>Connections:</source>
<translation>Connexions :</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="610"/>
<source>Information</source>
<translation>Informations</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="869"/>
<source>Comment:</source>
<translation>Commentaire :</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="1054"/>
<source>Torrent content:</source>
<translation>Contenu du torrent :</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="1021"/>
<source>Select All</source>
<translation>Tout sélectionner</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="1028"/>
<source>Select None</source>
<translation>Ne rien sélectionner</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="1107"/>
<source>Normal</source>
<translation>Normale</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="1102"/>
<source>High</source>
<translation>Haute</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="294"/>
<source>Share Ratio:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="410"/>
<source>Reannounce In:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="368"/>
<source>Last Seen Complete:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="628"/>
<source>Total Size:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="657"/>
<source>Pieces:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="686"/>
<source>Created By:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="715"/>
<source>Added On:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="744"/>
<source>Completed On:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="773"/>
<source>Created On:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="802"/>
<source>Torrent Hash:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="834"/>
<source>Save Path:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="1097"/>
<source>Maximum</source>
<translation>Maximale</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.ui" line="1089"/>
<location filename="../gui/properties/propertieswidget.ui" line="1092"/>
<source>Do not download</source>
<translation>Ne pas télécharger</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="428"/>
<source>Never</source>
<translation type="unfinished">Jamais</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="435"/>
<source>%1 x %2 (have %3)</source>
<comment>(torrent pieces) eg 152 x 4MB (have 25)</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="380"/>
<location filename="../gui/properties/propertieswidget.cpp" line="383"/>
<source>%1 (%2 this session)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="392"/>
<source>%1 (seeded for %2)</source>
<comment>e.g. 4m39s (seeded for 3m10s)</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="399"/>
<source>%1 (%2 max)</source>
<comment>%1 and %2 are numbers, e.g. 3 (10 max)</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="412"/>
<location filename="../gui/properties/propertieswidget.cpp" line="416"/>
<source>%1 (%2 total)</source>
<comment>%1 and %2 are numbers, e.g. 3 (10 total)</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="420"/>
<location filename="../gui/properties/propertieswidget.cpp" line="424"/>
<source>%1 (%2 avg.)</source>
<comment>%1 and %2 are speed rates, e.g. 200KiB/s (100KiB/s avg.)</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="565"/>
<source>Open</source>
        <translation>Ouvrir</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="566"/>
<source>Open Containing Folder</source>
        <translation>Ouvrir le dossier de destination</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="567"/>
<source>Rename...</source>
<translation>Renommer…</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="572"/>
<source>Priority</source>
<translation>Priorité</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="618"/>
<source>New Web seed</source>
<translation>Nouvelle source web</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="624"/>
<source>Remove Web seed</source>
<translation>Supprimer la source web</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="626"/>
<source>Copy Web seed URL</source>
<translation>Copier l'URL de la source web</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="627"/>
<source>Edit Web seed URL</source>
<translation>Modifier l'URL de la source web</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="652"/>
<source>Rename the file</source>
<translation>Renommer le fichier</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="653"/>
<source>New name:</source>
<translation>Nouveau nom :</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="657"/>
<location filename="../gui/properties/propertieswidget.cpp" line="688"/>
<source>The file could not be renamed</source>
<translation>Le fichier n'a pas pu être renommé</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="658"/>
<source>This file name contains forbidden characters, please choose a different one.</source>
<translation>Ce nom de fichier contient des caractères interdits, veuillez en choisir un autre.</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="689"/>
<location filename="../gui/properties/propertieswidget.cpp" line="727"/>
<source>This name is already in use in this folder. Please use a different name.</source>
<translation>Ce nom est déjà utilisé au sein de ce dossier. Veuillez choisir un nom différent.</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="726"/>
<source>The folder could not be renamed</source>
<translation>Le dossier n'a pas pu être renommé</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="829"/>
<source>qBittorrent</source>
<translation>qBittorrent</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="81"/>
<source>Filter files...</source>
<translation>Filtrer les fichiers…</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="772"/>
<source>New URL seed</source>
<comment>New HTTP source</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="773"/>
<source>New URL seed:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="779"/>
<location filename="../gui/properties/propertieswidget.cpp" line="830"/>
<source>This URL seed is already in the list.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="822"/>
<source>Web seed editing</source>
<translation>Modification de la source web</translation>
</message>
<message>
<location filename="../gui/properties/propertieswidget.cpp" line="823"/>
<source>Web seed URL:</source>
<translation>URL de la source web :</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<location filename="../webui/abstractwebapplication.cpp" line="110"/>
<source>Your IP address has been banned after too many failed authentication attempts.</source>
<translation>Votre adresse IP a été bannie après un nombre excessif de tentatives d'authentification échouées.</translation>
</message>
<message>
<location filename="../webui/webapplication.cpp" line="340"/>
<source>Error: '%1' is not a valid torrent file.
</source>
<translation>Erreur : « %1 » n'est pas un fichier torrent valide.
</translation>
</message>
<message>
<location filename="../webui/webapplication.cpp" line="345"/>
<source>Error: Could not add torrent to session.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../webui/webapplication.cpp" line="354"/>
<source>I/O Error: Could not create temporary file.</source>
<translation>Erreur d'entrée/sortie : le fichier temporaire n'a pas pu être créé.</translation>
</message>
<message>
<location filename="../app/main.cpp" line="140"/>
<source>%1 is an unknown command line parameter.</source>
<comment>--random-parameter is an unknown command line parameter.</comment>
        <translation>%1 est un paramètre de ligne de commande inconnu.</translation>
</message>
<message>
<location filename="../app/main.cpp" line="152"/>
<location filename="../app/main.cpp" line="165"/>
<source>%1 must be the single command line parameter.</source>
        <translation>%1 doit être le seul paramètre de ligne de commande.</translation>
</message>
<message>
<location filename="../app/main.cpp" line="175"/>
<source>%1 must specify the correct port (1 to 65535).</source>
<translation>%1 doit spécifier le port correct (1 à 65535).</translation>
</message>
<message>
<location filename="../app/main.cpp" line="199"/>
<source>You cannot use %1: qBittorrent is already running for this user.</source>
        <translation>Vous ne pouvez pas utiliser %1 : qBittorrent est déjà en cours d'exécution pour cet utilisateur.</translation>
</message>
<message>
<location filename="../app/main.cpp" line="384"/>
<source>Usage:</source>
<translation>Utilisation :</translation>
</message>
<message>
<location filename="../app/main.cpp" line="397"/>
<source>Options:</source>
        <translation>Options :</translation>
</message>
<message>
<location filename="../app/main.cpp" line="399"/>
<source>Displays program version</source>
<translation>Afficher la version du programme</translation>
</message>
<message>
<location filename="../app/main.cpp" line="401"/>
<source>Displays this help message</source>
<translation>Afficher ce message d'aide</translation>
</message>
<message>
<location filename="../app/main.cpp" line="403"/>
<source>Changes the Web UI port (current: %1)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/main.cpp" line="406"/>
<source>Disable splash screen</source>
<translation>Désactiver l'écran de démarrage</translation>
</message>
<message>
<location filename="../app/main.cpp" line="408"/>
<source>Run in daemon-mode (background)</source>
<translation>Exécuter en tâche de fond</translation>
</message>
<message>
<location filename="../app/main.cpp" line="410"/>
<source>Downloads the torrents passed by the user</source>
<translation>Télécharger les torrents transmis par l'utilisateur</translation>
</message>
<message>
<location filename="../app/main.cpp" line="420"/>
<source>Help</source>
<translation>Aide</translation>
</message>
<message>
<location filename="../app/main.cpp" line="429"/>
<source>Run application with -h option to read about command line parameters.</source>
<translation>Exécuter le programme avec l'option -h pour afficher les paramètres de ligne de commande.</translation>
</message>
<message>
<location filename="../app/main.cpp" line="431"/>
<source>Bad command line</source>
<translation>Mauvaise ligne de commande</translation>
</message>
<message>
<location filename="../app/main.cpp" line="437"/>
<source>Bad command line: </source>
<translation>Mauvaise ligne de commande :</translation>
</message>
<message>
<location filename="../app/main.cpp" line="450"/>
<source>Legal Notice</source>
<translation>Information légale</translation>
</message>
<message>
<location filename="../app/main.cpp" line="451"/>
<location filename="../app/main.cpp" line="461"/>
<source>qBittorrent is a file sharing program. When you run a torrent, its data will be made available to others by means of upload. Any content you share is your sole responsibility.
No further notices will be issued.</source>
<translation>qBittorrent est un logiciel de partage de fichiers. Lors de l'ajout d'un torrent, les données que vous téléchargez sont mises à disposition des autres utilisateurs. Vous êtes responsable du contenu que vous partagez.
Ce message d'avertissement ne sera plus affiché.</translation>
</message>
<message>
<location filename="../app/main.cpp" line="452"/>
<source>Press %1 key to accept and continue...</source>
<translation>Appuyez sur la touche %1 pour accepter et continuer…</translation>
</message>
<message>
<location filename="../app/main.cpp" line="462"/>
<source>Legal notice</source>
<translation>Information légale</translation>
</message>
<message>
<location filename="../app/main.cpp" line="463"/>
<source>Cancel</source>
<translation>Annuler</translation>
</message>
<message>
<location filename="../app/main.cpp" line="464"/>
<source>I Agree</source>
<translation>J'accepte</translation>
</message>
<message>
<location filename="../app/application.cpp" line="122"/>
<source>Torrent name: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/application.cpp" line="123"/>
<source>Torrent size: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/application.cpp" line="124"/>
<source>Save path: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/application.cpp" line="125"/>
<source>The torrent was downloaded in %1.</source>
<comment>The torrent was downloaded in 1 hour and 20 seconds</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/application.cpp" line="128"/>
<source>Thank you for using qBittorrent.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/application.cpp" line="134"/>
<source>[qBittorrent] '%1' has finished downloading</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="204"/>
<source>The remote host name was not found (invalid hostname)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="206"/>
<source>The operation was canceled</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="208"/>
<source>The remote server closed the connection prematurely, before the entire reply was received and processed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="210"/>
<source>The connection to the remote server timed out</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="212"/>
<source>SSL/TLS handshake failed</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="214"/>
<source>The remote server refused the connection</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="216"/>
<source>The connection to the proxy server was refused</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="218"/>
<source>The proxy server closed the connection prematurely</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="220"/>
<source>The proxy host name was not found</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="222"/>
<source>The connection to the proxy timed out or the proxy did not reply in time to the request sent</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="224"/>
<source>The proxy requires authentication in order to honor the request but did not accept any credentials offered</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="226"/>
<source>The access to the remote content was denied (401)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="228"/>
<source>The operation requested on the remote content is not permitted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="230"/>
<source>The remote content was not found at the server (404)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="232"/>
<source>The remote server requires authentication to serve the content but the credentials provided were not accepted</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="234"/>
<source>The Network Access API cannot honor the request because the protocol is not known</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="236"/>
<source>The requested operation is invalid for this protocol</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="238"/>
<source>An unknown network-related error was detected</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="240"/>
<source>An unknown proxy-related error was detected</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="242"/>
<source>An unknown error related to the remote content was detected</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="244"/>
<source>A breakdown in protocol was detected</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../core/net/downloadhandler.cpp" line="246"/>
<source>Unknown error</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/upgrade.h" line="50"/>
<location filename="../app/upgrade.h" line="63"/>
<source>Upgrade</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/upgrade.h" line="53"/>
<source>You updated from an older version that saved things differently. You must migrate to the new saving system. You will not be able to use an older version than v3.3.0 again. Continue? [y/n]</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/upgrade.h" line="62"/>
<source>You updated from an older version that saved things differently. You must migrate to the new saving system. If you continue, you will not be able to use an older version than v3.3.0 again.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/upgrade.h" line="121"/>
<source>Couldn't migrate torrent with hash: %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../app/upgrade.h" line="124"/>
<source>Couldn't migrate torrent. Invalid fastresume file name: %1</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>RSS</name>
<message>
<location filename="../gui/rss/rss.ui" line="17"/>
<source>Search</source>
<translation>Rechercher</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="31"/>
<source>New subscription</source>
<translation>Nouvelle souscription</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="47"/>
<location filename="../gui/rss/rss.ui" line="195"/>
<location filename="../gui/rss/rss.ui" line="198"/>
<source>Mark items read</source>
<translation>Marquer comme lu</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="66"/>
<source>Update all</source>
<translation>Tout mettre à jour</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="95"/>
<source>RSS Downloader...</source>
<translation>Téléchargeur de RSS…</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="102"/>
<source>Settings...</source>
<translation>Paramètres…</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="124"/>
<source>Torrents: (double-click to download)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="158"/>
<location filename="../gui/rss/rss.ui" line="161"/>
<source>Delete</source>
<translation>Supprimer</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="166"/>
<source>Rename...</source>
<translation>Renommer…</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="169"/>
<source>Rename</source>
<translation>Renommer</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="174"/>
<location filename="../gui/rss/rss.ui" line="177"/>
<source>Update</source>
<translation>Mettre à jour</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="182"/>
<source>New subscription...</source>
<translation>Nouvelle souscription…</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="187"/>
<location filename="../gui/rss/rss.ui" line="190"/>
<source>Update all feeds</source>
<translation>Tout mettre à jour</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="203"/>
<source>Download torrent</source>
<translation>Télécharger le torrent</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="208"/>
<source>Open news URL</source>
<translation>Ouvrir l'URL de l'article</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="213"/>
<source>Copy feed URL</source>
<translation>Copier l'URL du flux</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="218"/>
<source>New folder...</source>
<translation>Nouveau dossier…</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="223"/>
<source>Manage cookies...</source>
<translation>Gestion des cookies…</translation>
</message>
<message>
<location filename="../gui/rss/rss.ui" line="63"/>
<source>Refresh RSS streams</source>
<translation>Rafraîchir les flux RSS</translation>
</message>
</context>
<context>
<name>RSSImp</name>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="218"/>
<source>Stream URL:</source>
<translation>URL du flux :</translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="218"/>
<source>Please type a RSS stream URL</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="228"/>
<source>This RSS feed is already in the list.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="173"/>
<source>Please choose a folder name</source>
<translation>Veuillez indiquer un nom de dossier</translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="173"/>
<source>Folder name:</source>
<translation>Nom du dossier :</translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="173"/>
<source>New folder</source>
<translation>Nouveau dossier</translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="254"/>
<source>Deletion confirmation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="255"/>
<source>Are you sure you want to delete the selected RSS feeds?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="406"/>
<source>Please choose a new name for this RSS feed</source>
<translation>Veuillez choisir un nouveau nom pour ce flux RSS</translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="406"/>
<source>New feed name:</source>
<translation>Nouveau nom du flux :</translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="410"/>
<source>Name already in use</source>
<translation>Nom déjà utilisé</translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="410"/>
<source>This name is already used by another item, please choose another one.</source>
<translation>Ce nom est déjà utilisé par un autre élément, veuillez en choisir un autre.</translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="580"/>
<source>Date: </source>
<translation>Date : </translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="582"/>
<source>Author: </source>
<translation>Auteur : </translation>
</message>
<message>
<location filename="../gui/rss/rss_imp.cpp" line="659"/>
<source>Unread</source>
<translation>Non lu</translation>
</message>
</context>
<context>
<name>RssFeed</name>
<message>
<location filename="../gui/rss/rssfeed.cpp" line="368"/>
<source>Automatic download of '%1' from '%2' RSS feed failed because it doesn't contain a torrent or a magnet link...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/rss/rssfeed.cpp" line="373"/>
<source>Automatically downloading '%1' torrent from '%2' RSS feed...</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>RssParser</name>
<message>
<location filename="../gui/rss/rssparser.cpp" line="464"/>
<source>Failed to open downloaded RSS file.</source>
<translation>Échec de l'ouverture du fichier RSS téléchargé.</translation>
</message>
<message>
<location filename="../gui/rss/rssparser.cpp" line="501"/>
<source>Invalid RSS feed at '%1'.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>RssSettingsDlg</name>
<message>
<location filename="../gui/rss/rsssettingsdlg.ui" line="14"/>
<source>RSS Reader Settings</source>
<translation>Paramètres du lecteur RSS</translation>
</message>
<message>
<location filename="../gui/rss/rsssettingsdlg.ui" line="47"/>
<source>RSS feeds refresh interval:</source>
<translation>Intervalle de rafraîchissement des flux RSS :</translation>
</message>
<message>
<location filename="../gui/rss/rsssettingsdlg.ui" line="70"/>
<source>minutes</source>
<translation>minutes</translation>
</message>
<message>
<location filename="../gui/rss/rsssettingsdlg.ui" line="77"/>
<source>Maximum number of articles per feed:</source>
        <translation>Nombre maximum d'articles par flux :</translation>
</message>
</context>
<context>
<name>ScanFoldersModel</name>
<message>
<location filename="../core/scanfoldersmodel.cpp" line="157"/>
<source>Watched Folder</source>
<translation>Répertoire surveillé</translation>
</message>
<message>
<location filename="../core/scanfoldersmodel.cpp" line="160"/>
<source>Download here</source>
<translation>Télécharger ici</translation>
</message>
<message>
<location filename="../core/scanfoldersmodel.cpp" line="163"/>
<source>Download path</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>SearchCategories</name>
<message>
<location filename="../searchengine/supportedengines.h" line="53"/>
<source>All categories</source>
<translation>Toutes catégories</translation>
</message>
<message>
<location filename="../searchengine/supportedengines.h" line="54"/>
<source>Movies</source>
<translation>Films</translation>
</message>
<message>
<location filename="../searchengine/supportedengines.h" line="55"/>
<source>TV shows</source>
<translation>Séries TV</translation>
</message>
<message>
<location filename="../searchengine/supportedengines.h" line="56"/>
<source>Music</source>
<translation>Musique</translation>
</message>
<message>
<location filename="../searchengine/supportedengines.h" line="57"/>
<source>Games</source>
<translation>Jeux</translation>
</message>
<message>
<location filename="../searchengine/supportedengines.h" line="58"/>
<source>Anime</source>
<translation>Animé</translation>
</message>
<message>
<location filename="../searchengine/supportedengines.h" line="59"/>
<source>Software</source>
<translation>Logiciels</translation>
</message>
<message>
<location filename="../searchengine/supportedengines.h" line="60"/>
<source>Pictures</source>
<translation>Photos</translation>
</message>
<message>
<location filename="../searchengine/supportedengines.h" line="61"/>
<source>Books</source>
<translation>Livres</translation>
</message>
</context>
<context>
<name>SearchEngine</name>
<message>
<location filename="../searchengine/searchengine.cpp" line="190"/>
<location filename="../searchengine/searchengine.cpp" line="220"/>
<location filename="../searchengine/searchengine.cpp" line="479"/>
<source>Search</source>
<translation>Rechercher</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="203"/>
<source>Please install Python to use the Search Engine.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="233"/>
<source>Empty search pattern</source>
<translation>Motif de recherche vide</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="233"/>
<source>Please type a search pattern first</source>
<translation>Veuillez entrer un motif de recherche</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="313"/>
<source>Searching...</source>
<translation>Recherche en cours…</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="316"/>
<source>Stop</source>
<translation>Arrêter</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="203"/>
<location filename="../searchengine/searchengine.cpp" line="453"/>
<source>Search Engine</source>
<translation>Moteur de recherche</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="453"/>
<location filename="../searchengine/searchengine.cpp" line="474"/>
<source>Search has finished</source>
<translation>Fin de la recherche</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="463"/>
<source>An error occurred during search...</source>
<translation>Une erreur s'est produite lors de la recherche…</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="461"/>
<location filename="../searchengine/searchengine.cpp" line="468"/>
<source>Search aborted</source>
<translation>La recherche a été interrompue</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="118"/>
<source>All enabled</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="119"/>
<source>All engines</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="122"/>
<location filename="../searchengine/searchengine.cpp" line="177"/>
<source>Multiple...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="262"/>
<location filename="../searchengine/searchengine.cpp" line="334"/>
<source>Results <i>(%1)</i>:</source>
<comment>i.e: Search results</comment>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="472"/>
<source>Search returned no results</source>
<translation>La recherche n'a retourné aucun résultat</translation>
</message>
<message>
<location filename="../searchengine/searchengine.cpp" line="561"/>
<source>Stopped</source>
<translation type="unfinished">Arrêtée</translation>
</message>
</context>
<context>
<name>SearchListDelegate</name>
<message>
<location filename="../searchengine/searchlistdelegate.h" line="60"/>
<location filename="../searchengine/searchlistdelegate.h" line="64"/>
<source>Unknown</source>
<translation type="unfinished">Inconnue</translation>
</message>
</context>
<context>
<name>SearchTab</name>
<message>
<location filename="../searchengine/searchtab.cpp" line="66"/>
<source>Name</source>
<comment>i.e: file name</comment>
<translation>Nom</translation>
</message>
<message>
<location filename="../searchengine/searchtab.cpp" line="67"/>
<source>Size</source>
<comment>i.e: file size</comment>
<translation>Taille</translation>
</message>
<message>
<location filename="../searchengine/searchtab.cpp" line="68"/>
<source>Seeders</source>
<comment>i.e: Number of full sources</comment>
<translation>Sources complètes</translation>
</message>
<message>
<location filename="../searchengine/searchtab.cpp" line="69"/>
<source>Leechers</source>
<comment>i.e: Number of partial sources</comment>
<translation>Sources partielles</translation>
</message>
<message>
<location filename="../searchengine/searchtab.cpp" line="70"/>
<source>Search engine</source>
<translation>Moteur de recherche</translation>
</message>
</context>
<context>
<name>ShutdownConfirmDlg</name>
<message>
<location filename="../gui/shutdownconfirm.cpp" line="45"/>
<source>Exit confirmation</source>
        <translation>Confirmation de fermeture</translation>
</message>
<message>
<location filename="../gui/shutdownconfirm.cpp" line="46"/>
<source>Exit now</source>
<translation>Quitter maintenant</translation>
</message>
<message>
<location filename="../gui/shutdownconfirm.cpp" line="49"/>
<source>Shutdown confirmation</source>
<translation>Confirmation de l'extinction</translation>
</message>
<message>
<location filename="../gui/shutdownconfirm.cpp" line="50"/>
<source>Shutdown now</source>
<translation>Éteindre maintenant</translation>
</message>
<message>
<location filename="../gui/shutdownconfirm.cpp" line="109"/>
<source>qBittorrent will now exit unless you cancel within the next %1 seconds.</source>
        <translation>qBittorrent va maintenant quitter à moins que vous n'annuliez dans les %1 prochaines secondes.</translation>
</message>
<message>
<location filename="../gui/shutdownconfirm.cpp" line="112"/>
<source>The computer will now be switched off unless you cancel within the next %1 seconds.</source>
        <translation>L'ordinateur va maintenant s'éteindre à moins que vous n'annuliez dans les %1 prochaines secondes.</translation>
</message>
<message>
<location filename="../gui/shutdownconfirm.cpp" line="115"/>
<source>The computer will now go to sleep mode unless you cancel within the next %1 seconds.</source>
<translation>L’ordinateur va passer en mode veille à moins que vous l’annuliez dans les %1 prochaines secondes.</translation>
</message>
<message>
<location filename="../gui/shutdownconfirm.cpp" line="118"/>
<source>The computer will now go to hibernation mode unless you cancel within the next %1 seconds.</source>
<translation>L’ordinateur va hiberner à moins que vous l’annuliez dans les %1 prochaines secondes.</translation>
</message>
</context>
<context>
<name>SpeedLimitDialog</name>
<message>
<location filename="../gui/speedlimitdlg.cpp" line="78"/>
<source>KiB/s</source>
<translation>Kio/s</translation>
</message>
</context>
<context>
<name>SpeedPlotView</name>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="47"/>
<source>Total Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="48"/>
<source>Total Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="52"/>
<source>Payload Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="53"/>
<source>Payload Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="57"/>
<source>Overhead Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="58"/>
<source>Overhead Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="62"/>
<source>DHT Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="63"/>
<source>DHT Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="67"/>
<source>Tracker Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedplotview.cpp" line="68"/>
<source>Tracker Download</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>SpeedWidget</name>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="68"/>
<source>Period:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="71"/>
<source>1 Minute</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="72"/>
<source>5 Minutes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="73"/>
<source>30 Minutes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="74"/>
<source>6 Hours</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="103"/>
<source>Select Graphs</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="79"/>
<source>Total Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="80"/>
<source>Total Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="81"/>
<source>Payload Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="82"/>
<source>Payload Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="83"/>
<source>Overhead Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="84"/>
<source>Overhead Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="85"/>
<source>DHT Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="86"/>
<source>DHT Download</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="87"/>
<source>Tracker Upload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/properties/speedwidget.cpp" line="88"/>
<source>Tracker Download</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>StatsDialog</name>
<message>
<location filename="../gui/statsdialog.ui" line="14"/>
<source>Statistics</source>
<translation>Statistiques</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="20"/>
<source>User statistics</source>
<translation>Statistiques utilisateur</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="26"/>
<source>Total peer connections:</source>
<translation>Nombre total de connexions aux pairs :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="33"/>
<source>Global ratio:</source>
<translation>Ratio global :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="47"/>
<source>Alltime download:</source>
<translation>Téléchargé depuis la première utilisation :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="68"/>
<source>Alltime upload:</source>
<translation>Envoyé depuis la première utilisation :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="82"/>
<source>Total waste (this session):</source>
<translation>Total gaspillé (durant cette session) :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="99"/>
<source>Cache statistics</source>
<translation>Statistiques du tampon</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="105"/>
<source>Read cache Hits:</source>
<translation>Succès de tampon en lecture :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="126"/>
<source>Total buffers size:</source>
        <translation>Taille totale des tampons :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="136"/>
<source>Performance statistics</source>
<translation>Statistiques de performance</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="170"/>
<source>Queued I/O jobs:</source>
<translation>Actions d'E/S en file d'attente :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="177"/>
<source>Write cache overload:</source>
<translation>Surcharge du tampon d'écriture :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="184"/>
<source>Average time in queue (ms):</source>
<translation>Temps moyen passé en file d'attente (ms) :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="191"/>
<source>Read cache overload:</source>
<translation>Surcharge du tampon de lecture :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="198"/>
<source>Total queued size:</source>
<translation>Taille totale des fichiers en file d'attente :</translation>
</message>
<message>
<location filename="../gui/statsdialog.ui" line="243"/>
<source>OK</source>
<translation>OK</translation>
</message>
</context>
<context>
<name>StatusBar</name>
<message>
<location filename="../gui/statusbar.cpp" line="59"/>
<location filename="../gui/statusbar.cpp" line="171"/>
<source>Connection status:</source>
<translation>Statut de la connexion :</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="59"/>
<location filename="../gui/statusbar.cpp" line="171"/>
<source>No direct connections. This may indicate network configuration problems.</source>
        <translation>Aucune connexion directe. Ceci peut être le signe d'un problème de configuration réseau.</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="73"/>
<location filename="../gui/statusbar.cpp" line="178"/>
<source>DHT: %1 nodes</source>
<translation>DHT : %1 nœuds</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="141"/>
<source>qBittorrent needs to be restarted</source>
<translation>qBittorrent doit être redémarré</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="151"/>
<source>qBittorrent was just updated and needs to be restarted for the changes to be effective.</source>
<translation>qBittorrent vient d'être mis à jour et doit être redémarré pour que les changements soient pris en compte.</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="163"/>
<location filename="../gui/statusbar.cpp" line="168"/>
<source>Connection Status:</source>
<translation>État de la connexion :</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="163"/>
<source>Offline. This usually means that qBittorrent failed to listen on the selected port for incoming connections.</source>
        <translation>Hors ligne. Ceci signifie généralement que qBittorrent n'a pas pu se mettre en écoute sur le port défini pour les connexions entrantes.</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="168"/>
<source>Online</source>
<translation>Connecté</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="203"/>
<source>Click to switch to alternative speed limits</source>
<translation>Cliquez ici pour utiliser les limites de vitesse alternatives</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="199"/>
<source>Click to switch to regular speed limits</source>
<translation>Cliquez ici pour utiliser les limites de vitesse normales</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="212"/>
<source>Manual change of rate limits mode. The scheduler is disabled.</source>
        <translation>Changement manuel du mode de limitation de vitesse. Le planificateur est désactivé.</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="219"/>
<source>Global Download Speed Limit</source>
<translation>Limite globale de la vitesse de réception</translation>
</message>
<message>
<location filename="../gui/statusbar.cpp" line="245"/>
<source>Global Upload Speed Limit</source>
<translation>Limite globale de la vitesse d'envoi</translation>
</message>
</context>
<context>
<name>StatusFiltersWidget</name>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="117"/>
<source>All (0)</source>
<comment>this is for the status filter</comment>
<translation>Tous (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="120"/>
<source>Downloading (0)</source>
<translation>En Téléchargement (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="123"/>
<source>Seeding (0)</source>
<translation>En Partage (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="126"/>
<source>Completed (0)</source>
<translation>Terminés (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="129"/>
<source>Resumed (0)</source>
<translation>Démarrés (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="132"/>
<source>Paused (0)</source>
<translation>En Pause (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="135"/>
<source>Active (0)</source>
<translation>Actifs (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="138"/>
<source>Inactive (0)</source>
<translation>Inactifs (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="141"/>
<source>Errored (0)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="158"/>
<source>All (%1)</source>
<translation>Tous (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="159"/>
<source>Downloading (%1)</source>
<translation>En Téléchargement (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="160"/>
<source>Seeding (%1)</source>
<translation>En Partage (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="161"/>
<source>Completed (%1)</source>
<translation>Terminés (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="162"/>
<source>Paused (%1)</source>
<translation>En Pause (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="163"/>
<source>Resumed (%1)</source>
<translation>Démarrés (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="164"/>
<source>Active (%1)</source>
<translation>Actifs (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="165"/>
<source>Inactive (%1)</source>
<translation>Inactifs (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="166"/>
<source>Errored (%1)</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TorrentContentModel</name>
<message>
<location filename="../gui/torrentcontentmodel.cpp" line="56"/>
<source>Name</source>
<translation>Nom</translation>
</message>
<message>
<location filename="../gui/torrentcontentmodel.cpp" line="56"/>
<source>Size</source>
<translation>Taille</translation>
</message>
<message>
<location filename="../gui/torrentcontentmodel.cpp" line="57"/>
<source>Progress</source>
<translation>Progression</translation>
</message>
<message>
<location filename="../gui/torrentcontentmodel.cpp" line="57"/>
<source>Priority</source>
<translation>Priorité</translation>
</message>
</context>
<context>
<name>TorrentCreatorDlg</name>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="78"/>
<source>Select a folder to add to the torrent</source>
<translation>Sélectionner un dossier à ajouter au torrent</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="92"/>
<source>Select a file to add to the torrent</source>
<translation>Sélectionner un fichier à ajouter au torrent</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="114"/>
<source>No input path set</source>
        <translation>Aucun fichier inclus</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="114"/>
<source>Please type an input path first</source>
<translation>Veuillez sélectionner un fichier ou un dossier à inclure d'abord</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="124"/>
<source>Select destination torrent file</source>
<translation>Sélectionner le torrent à créer</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="124"/>
<source>Torrent Files (*.torrent)</source>
        <translation>Fichiers torrent (*.torrent)</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="176"/>
<source>Torrent was created successfully: %1</source>
<comment>%1 is the path of the torrent</comment>
        <translation>Le torrent a été créé avec succès : %1</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="152"/>
<location filename="../gui/torrentcreatordlg.cpp" line="165"/>
<location filename="../gui/torrentcreatordlg.cpp" line="176"/>
<source>Torrent creation</source>
<translation>Création d'un torrent</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="152"/>
<source>Torrent creation was unsuccessful, reason: %1</source>
<translation>La création du torrent a échoué, raison : %1</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.cpp" line="165"/>
<source>Created torrent file is invalid. It won't be added to download list.</source>
<translation>Le torrent créé est invalide. Il ne sera pas ajouté à la liste des téléchargements.</translation>
</message>
</context>
<context>
<name>TorrentImportDlg</name>
<message>
<location filename="../gui/torrentimportdlg.ui" line="14"/>
<source>Torrent Import</source>
<translation>Import de torrent</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.ui" line="53"/>
<source>This assistant will help you share with qBittorrent a torrent that you have already downloaded.</source>
<translation>Cet assistant va vous aider à partager avec qBittorrent un torrent que vous avez déjà téléchargé.</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.ui" line="65"/>
<source>Torrent file to import:</source>
<translation>Fichier torrent à importer :</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.ui" line="109"/>
<source>...</source>
<translation>…</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.ui" line="90"/>
<source>Content location:</source>
<translation>Chemin vers le contenu :</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.ui" line="121"/>
<source>Skip the data checking stage and start seeding immediately</source>
<translation>Ne pas procéder à la vérification et partager directement le torrent</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.ui" line="131"/>
<source>Import</source>
<translation>Importer</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.cpp" line="65"/>
<source>Torrent file to import</source>
<translation>Fichier torrent à importer</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.cpp" line="65"/>
<source>Torrent files</source>
<translation>Fichiers torrent</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.cpp" line="89"/>
<source>'%1' Files</source>
<comment>%1 is a file extension (e.g. PDF)</comment>
        <translation>Fichiers « %1 »</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.cpp" line="91"/>
<source>Please provide the location of '%1'</source>
<comment>%1 is a file name</comment>
        <translation>Veuillez indiquer l'emplacement de « %1 »</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.cpp" line="124"/>
<source>Please point to the location of the torrent: %1</source>
<translation>Veuillez indiquer le chemin vers le contenu du torrent : %1</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.cpp" line="222"/>
<source>Invalid torrent file</source>
<translation>Fichier torrent invalide</translation>
</message>
<message>
<location filename="../gui/torrentimportdlg.cpp" line="222"/>
<source>This is not a valid torrent file.</source>
<translation>Il ne s'agit pas d'un fichier torrent valide.</translation>
</message>
</context>
<context>
<name>TorrentModel</name>
<message>
<location filename="../gui/torrentmodel.cpp" line="97"/>
<source>Name</source>
<comment>i.e: torrent name</comment>
<translation>Nom</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="98"/>
<source>Size</source>
<comment>i.e: torrent size</comment>
<translation>Taille</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="99"/>
<source>Done</source>
<comment>% Done</comment>
<translation>Progression</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="100"/>
<source>Status</source>
<comment>Torrent status (e.g. downloading, seeding, paused)</comment>
<translation>Statut</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="101"/>
<source>Seeds</source>
<comment>i.e. full sources (often untranslated)</comment>
<translation>Sources</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="102"/>
<source>Peers</source>
<comment>i.e. partial sources (often untranslated)</comment>
<translation>Pairs</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="103"/>
<source>Down Speed</source>
<comment>i.e: Download speed</comment>
<translation>Vitesse DL</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="104"/>
<source>Up Speed</source>
<comment>i.e: Upload speed</comment>
<translation>Vitesse UP</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="105"/>
<source>Ratio</source>
<comment>Share ratio</comment>
<translation>Ratio</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="106"/>
<source>ETA</source>
<comment>i.e: Estimated Time of Arrival / Time left</comment>
<translation>Temps restant</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="107"/>
<source>Label</source>
<translation>Catégorie</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="108"/>
<source>Added On</source>
<comment>Torrent was added to transfer list on 01/01/2010 08:00</comment>
<translation>Ajouté le</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="109"/>
<source>Completed On</source>
<comment>Torrent was completed on 01/01/2010 08:00</comment>
<translation>Terminé le</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="110"/>
<source>Tracker</source>
<translation>Tracker</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="111"/>
<source>Down Limit</source>
<comment>i.e: Download limit</comment>
<translation>Limite réception</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="112"/>
<source>Up Limit</source>
<comment>i.e: Upload limit</comment>
<translation>Limite envoi</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="113"/>
<source>Downloaded</source>
<comment>Amount of data downloaded (e.g. in MB)</comment>
<translation>Téléchargé</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="114"/>
<source>Uploaded</source>
<comment>Amount of data uploaded (e.g. in MB)</comment>
<translation>Envoyé</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="115"/>
<source>Session Download</source>
<comment>Amount of data downloaded since program open (e.g. in MB)</comment>
<translation>Téléchargement de la session</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="116"/>
<source>Session Upload</source>
<comment>Amount of data uploaded since program open (e.g. in MB)</comment>
<translation>Émission de la session</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="117"/>
<source>Remaining</source>
<comment>Amount of data left to download (e.g. in MB)</comment>
<translation>Restant</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="118"/>
<source>Time Active</source>
<comment>Time (duration) the torrent is active (not paused)</comment>
<translation>Actif pendant</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="119"/>
<source>Save path</source>
<comment>Torrent save path</comment>
<translation>Chemin d'enregistrement</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="120"/>
<source>Completed</source>
<comment>Amount of data completed (e.g. in MB)</comment>
<translation>Terminé</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="121"/>
<source>Ratio Limit</source>
<comment>Upload share ratio limit</comment>
<translation>Limite de ratio</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="122"/>
<source>Last Seen Complete</source>
<comment>Indicates the time when the torrent was last seen complete/whole</comment>
<translation>Dernière fois vu complet</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="123"/>
<source>Last Activity</source>
<comment>Time passed since a chunk was downloaded/uploaded</comment>
<translation>Dernière activité</translation>
</message>
<message>
<location filename="../gui/torrentmodel.cpp" line="124"/>
<source>Total Size</source>
<comment>i.e. Size including unwanted data</comment>
<translation>Taille totale</translation>
</message>
</context>
<context>
<name>TrackerFiltersList</name>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="428"/>
<source>All (0)</source>
<comment>this is for the label filter</comment>
<translation>Tous (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="431"/>
<source>Trackerless (0)</source>
<translation>Sans tracker (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="434"/>
<source>Error (0)</source>
<translation>Erreur (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="437"/>
<source>Warning (0)</source>
<translation>Alerte (0)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="478"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="535"/>
<source>Trackerless (%1)</source>
<translation>Sans tracker (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="484"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="530"/>
<source>%1 (%2)</source>
<comment>openbittorrent.com (10)</comment>
<translation>%1 (%2)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="560"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="592"/>
<source>Error (%1)</source>
<translation>Erreur (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="573"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="607"/>
<source>Warning (%1)</source>
<translation>Alerte (%1)</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="635"/>
<source>Couldn't decode favicon for URL '%1'. Trying to download favicon in PNG format.</source>
        <translation>Impossible de décoder la favicon pour l'URL « %1 ». Tentative de téléchargement de la favicon au format PNG.</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="640"/>
<source>Couldn't decode favicon for URL '%1'.</source>
        <translation>Impossible de décoder la favicon pour l'URL « %1 ».</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="654"/>
<source>Couldn't download favicon for URL '%1'. Reason: %2</source>
        <translation>Impossible de télécharger la favicon pour l'URL « %1 ». Raison : %2</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="663"/>
<source>Resume torrents</source>
<translation>Démarrer les torrents</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="664"/>
<source>Pause torrents</source>
<translation>Mettre en pause les torrents</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="665"/>
<source>Delete torrents</source>
<translation>Supprimer les torrents</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="699"/>
<location filename="../gui/transferlistfilterswidget.cpp" line="713"/>
<source>All (%1)</source>
<comment>this is for the tracker filter</comment>
<translation>Tous (%1)</translation>
</message>
</context>
<context>
<name>TrackerList</name>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="69"/>
<source>URL</source>
<translation>URL</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="70"/>
<source>Status</source>
<translation>Statut</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="71"/>
<source>Peers</source>
<translation>Pairs</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="72"/>
<source>Message</source>
<translation>Message</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="217"/>
<location filename="../gui/properties/trackerlist.cpp" line="286"/>
<source>Working</source>
<translation>Fonctionne</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="218"/>
<source>Disabled</source>
<translation>Désactivé</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="239"/>
<source>This torrent is private</source>
<translation>Ce torrent est privé</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="290"/>
<source>Updating...</source>
<translation>Mise à jour…</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="294"/>
<source>Not working</source>
<translation>Ne fonctionne pas</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="298"/>
<source>Not contacted yet</source>
<translation>Pas encore contacté</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="380"/>
<source>Tracker URL:</source>
<translation>URL du tracker :</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="380"/>
<source>Tracker editing</source>
<translation>Modification du tracker</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="386"/>
<location filename="../gui/properties/trackerlist.cpp" line="397"/>
<source>Tracker editing failed</source>
<translation>Échec de la modification du tracker</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="386"/>
<source>The tracker URL entered is invalid.</source>
        <translation>L'URL du tracker fournie est invalide.</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="397"/>
<source>The tracker URL already exists.</source>
<translation>L'URL du tracker existe déjà.</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="448"/>
<source>Add a new tracker...</source>
<translation>Ajouter un nouveau tracker…</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="454"/>
<source>Copy tracker URL</source>
        <translation>Copier l'URL du tracker</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="455"/>
<source>Edit selected tracker URL</source>
<translation>Modifier l'URL du tracker sélectionné</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="460"/>
<source>Force reannounce to selected trackers</source>
<translation>Forcer une nouvelle annonce aux trackers sélectionnés</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="462"/>
<source>Force reannounce to all trackers</source>
<translation>Forcer une nouvelle annonce à tous les trackers</translation>
</message>
<message>
<location filename="../gui/properties/trackerlist.cpp" line="453"/>
<source>Remove tracker</source>
<translation>Supprimer le tracker</translation>
</message>
</context>
<context>
<name>TrackersAdditionDlg</name>
<message>
<location filename="../gui/properties/trackersadditiondlg.ui" line="14"/>
<source>Trackers addition dialog</source>
<translation>Fenêtre d'ajout de trackers</translation>
</message>
<message>
<location filename="../gui/properties/trackersadditiondlg.ui" line="20"/>
<source>List of trackers to add (one per line):</source>
<translation>Liste des trackers à ajouter (un par ligne) :</translation>
</message>
<message utf8="true">
<location filename="../gui/properties/trackersadditiondlg.ui" line="44"/>
<source>µTorrent compatible list URL:</source>
<translation>URL de la liste compatible avec µTorrent :</translation>
</message>
<message>
<location filename="../gui/properties/trackersadditiondlg.cpp" line="73"/>
<source>I/O Error</source>
<translation>Erreur E/S</translation>
</message>
<message>
<location filename="../gui/properties/trackersadditiondlg.cpp" line="73"/>
<source>Error while trying to open the downloaded file.</source>
<translation>Erreur à l'ouverture du fichier téléchargé.</translation>
</message>
<message>
<location filename="../gui/properties/trackersadditiondlg.cpp" line="111"/>
<source>No change</source>
<translation>Aucun changement</translation>
</message>
<message>
<location filename="../gui/properties/trackersadditiondlg.cpp" line="111"/>
<source>No additional trackers were found.</source>
<translation>Aucun tracker supplémentaire n'est disponible.</translation>
</message>
<message>
<location filename="../gui/properties/trackersadditiondlg.cpp" line="119"/>
<source>Download error</source>
<translation>Erreur de téléchargement</translation>
</message>
<message>
<location filename="../gui/properties/trackersadditiondlg.cpp" line="119"/>
<source>The trackers list could not be downloaded, reason: %1</source>
<translation>La liste de trackers n'a pas pu être téléchargée, raison : %1</translation>
</message>
</context>
<context>
<name>TransferListDelegate</name>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="97"/>
<source>Downloading</source>
<translation>En téléchargement</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="103"/>
<source>Downloading metadata</source>
<comment>used when loading a magnet link</comment>
<translation>Téléchargement des métadonnées</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="109"/>
<source>Allocating</source>
<comment>qBittorrent is allocating the files on disk</comment>
        <translation>Allocation</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="133"/>
<source>Paused</source>
<translation>En pause</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="120"/>
<source>Queued</source>
<comment>i.e. torrent is queued</comment>
<translation>En file d'attente</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="113"/>
<source>Seeding</source>
<comment>Torrent is complete and in upload-only mode</comment>
<translation>En partage</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="100"/>
<source>Stalled</source>
<comment>Torrent is waiting for download to begin</comment>
<translation>En attente</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="106"/>
<source>[F] Downloading</source>
<comment>used when the torrent is forced started. You probably shouldn't translate the F.</comment>
<translation>[F] Téléchargement</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="116"/>
<source>[F] Seeding</source>
<comment>used when the torrent is forced started. You probably shouldn't translate the F.</comment>
        <translation>[F] Partage</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="124"/>
<source>Checking</source>
<comment>Torrent local data is being checked</comment>
<translation>Vérification</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="127"/>
<source>Queued for checking</source>
<comment>i.e. torrent is queued for hash checking</comment>
        <translation>En attente de vérification</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="130"/>
<source>Checking resume data</source>
<comment>used when loading the torrents from disk after qbt is launched. It checks the correctness of the .fastresume file. Normally it is completed in a fraction of a second, unless loading many many torrents.</comment>
        <translation>Vérification des données de reprise</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="136"/>
<source>Completed</source>
<translation>Terminé</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="139"/>
<source>Missing Files</source>
<translation>Fichiers manquants</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="142"/>
<source>Errored</source>
<comment>torrent status, the torrent has an error</comment>
        <translation>En erreur</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="172"/>
<source>%1 (seeded for %2)</source>
<comment>e.g. 4m39s (seeded for 3m10s)</comment>
        <translation>%1 (partagé pendant %2)</translation>
</message>
<message>
<location filename="../gui/transferlistdelegate.cpp" line="237"/>
<source>%1 ago</source>
<comment>e.g.: 1h 20m ago</comment>
<translation>il y a %1</translation>
</message>
</context>
<context>
<name>TransferListFiltersWidget</name>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="792"/>
<source>Status</source>
<translation>Statut</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="800"/>
<source>Labels</source>
<translation>Catégories</translation>
</message>
<message>
<location filename="../gui/transferlistfilterswidget.cpp" line="808"/>
<source>Trackers</source>
<translation>Trackers</translation>
</message>
</context>
<context>
<name>TransferListWidget</name>
<message>
<location filename="../gui/transferlistwidget.cpp" line="511"/>
<source>Column visibility</source>
<translation>Visibilité des colonnes</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="763"/>
<source>Label</source>
<translation>Catégorie</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="252"/>
<source>Choose save path</source>
<translation>Choix du répertoire de destination</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="439"/>
<source>Torrent Download Speed Limiting</source>
<translation>Limitation de la vitesse de réception</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="468"/>
<source>Torrent Upload Speed Limiting</source>
<translation>Limitation de la vitesse d'émission</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="499"/>
<source>Recheck confirmation</source>
        <translation>Confirmation de revérification</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="499"/>
<source>Are you sure you want to recheck the selected torrent(s)?</source>
        <translation>Êtes-vous sûr de vouloir revérifier le ou les torrent(s) sélectionné(s) ?</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="575"/>
<source>New Label</source>
<translation>Nouvelle catégorie</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="575"/>
<source>Label:</source>
<translation>Catégorie :</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="581"/>
<source>Invalid label name</source>
<translation>Nom de catégorie incorrect</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="581"/>
<source>Please don't use any special characters in the label name.</source>
<translation>N'utilisez pas de caractères spéciaux dans le nom de catégorie.</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="600"/>
<source>Rename</source>
<translation>Renommer</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="600"/>
<source>New name:</source>
<translation>Nouveau nom :</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="629"/>
<source>Resume</source>
<comment>Resume/start the torrent</comment>
<translation>Démarrer</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="633"/>
<source>Force Resume</source>
<comment>Force Resume/start the torrent</comment>
<translation>Forcer la reprise</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="631"/>
<source>Pause</source>
<comment>Pause the torrent</comment>
<translation>Mettre en pause</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="635"/>
<source>Delete</source>
<comment>Delete the torrent</comment>
<translation>Supprimer</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="637"/>
<source>Preview file...</source>
<translation>Prévisualiser le fichier…</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="639"/>
<source>Limit share ratio...</source>
<translation>Limiter le ratio de partage…</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="641"/>
<source>Limit upload rate...</source>
<translation>Limiter la vitesse d'envoi…</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="643"/>
<source>Limit download rate...</source>
<translation>Limiter la vitesse de réception…</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="645"/>
<source>Open destination folder</source>
<translation>Ouvrir le répertoire de destination</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="647"/>
<source>Move up</source>
<comment>i.e. move up in the queue</comment>
<translation>Déplacer vers le haut</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="649"/>
<source>Move down</source>
<comment>i.e. Move down in the queue</comment>
<translation>Déplacer vers le bas</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="651"/>
<source>Move to top</source>
<comment>i.e. Move to top of the queue</comment>
<translation>Déplacer tout en haut</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="653"/>
<source>Move to bottom</source>
<comment>i.e. Move to bottom of the queue</comment>
<translation>Déplacer tout en bas</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="655"/>
<source>Set location...</source>
<translation>Chemin de sauvegarde…</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="661"/>
<source>Copy name</source>
        <translation>Copier le nom</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="813"/>
<source>Priority</source>
<translation>Priorité</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="657"/>
<source>Force recheck</source>
<translation>Forcer une revérification</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="659"/>
<source>Copy magnet link</source>
<translation>Copier le lien magnet</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="663"/>
<source>Super seeding mode</source>
<translation>Mode de super-partage</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="666"/>
<source>Rename...</source>
<translation>Renommer…</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="668"/>
<source>Download in sequential order</source>
<translation>Téléchargement séquentiel</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="671"/>
<source>Download first and last piece first</source>
<translation>Téléchargement prioritaire du début et de la fin</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="764"/>
<source>New...</source>
<comment>New label...</comment>
<translation>Nouvelle catégorie…</translation>
</message>
<message>
<location filename="../gui/transferlistwidget.cpp" line="765"/>
<source>Reset</source>
<comment>Reset label</comment>
<translation>Réinitialiser la catégorie</translation>
</message>
</context>
<context>
<name>UpDownRatioDlg</name>
<message>
<location filename="../gui/updownratiodlg.ui" line="14"/>
<source>Torrent Upload/Download Ratio Limiting</source>
<translation>Limitation du ratio de partage</translation>
</message>
<message>
<location filename="../gui/updownratiodlg.ui" line="20"/>
<source>Use global ratio limit</source>
<translation>Utiliser la limite globale</translation>
</message>
<message>
<location filename="../gui/updownratiodlg.ui" line="23"/>
<location filename="../gui/updownratiodlg.ui" line="33"/>
<location filename="../gui/updownratiodlg.ui" line="45"/>
<source>buttonGroup</source>
<translation>buttonGroup</translation>
</message>
<message>
<location filename="../gui/updownratiodlg.ui" line="30"/>
<source>Set no ratio limit</source>
<translation>Ne pas limiter le ratio</translation>
</message>
<message>
<location filename="../gui/updownratiodlg.ui" line="42"/>
<source>Set ratio limit to</source>
<translation>Limiter le ratio à</translation>
</message>
</context>
<context>
<name>WebUI</name>
<message>
<location filename="../webui/webui.cpp" line="84"/>
<source>The Web UI is listening on port %1</source>
<translation>L'interface web est associée au port %1</translation>
</message>
<message>
<location filename="../webui/webui.cpp" line="86"/>
<source>Web UI Error - Unable to bind Web UI to port %1</source>
        <translation>Erreur de l'interface web - Impossible d'associer l'interface web au port %1</translation>
</message>
</context>
<context>
<name>about</name>
<message>
<location filename="../gui/about_imp.h" line="55"/>
<source>An advanced BitTorrent client programmed in <nobr>C++</nobr>, based on Qt toolkit and libtorrent-rasterbar.</source>
        <translation>Un client BitTorrent avancé programmé en <nobr>C++</nobr>, basé sur la boîte à outils Qt et libtorrent-rasterbar.</translation>
</message>
<message>
<location filename="../gui/about_imp.h" line="57"/>
<source>Copyright %1 2006-2015 The qBittorrent project</source>
        <translation>Copyright %1 2006-2015 Le projet qBittorrent</translation>
</message>
<message>
<location filename="../gui/about_imp.h" line="59"/>
<source>Home Page: </source>
<translation>Site officiel :</translation>
</message>
<message>
<location filename="../gui/about_imp.h" line="61"/>
<source>Bug Tracker: </source>
<translation>Suivi des bogues :</translation>
</message>
<message>
<location filename="../gui/about_imp.h" line="63"/>
<source>Forum: </source>
<translation>Forum :</translation>
</message>
<message>
<location filename="../gui/about_imp.h" line="66"/>
<source>IRC: #qbittorrent on Freenode</source>
        <translation>IRC : #qbittorrent sur Freenode</translation>
</message>
</context>
<context>
<name>addPeersDialog</name>
<message>
<location filename="../gui/properties/peersadditiondlg.ui" line="14"/>
<source>Add Peers</source>
        <translation>Ajouter des pairs</translation>
</message>
<message>
<location filename="../gui/properties/peersadditiondlg.ui" line="20"/>
<source>List of peers to add (one per line):</source>
        <translation>Liste des pairs à ajouter (un par ligne) :</translation>
</message>
<message>
<location filename="../gui/properties/peersadditiondlg.ui" line="37"/>
<source>Format: IPv4:port / [IPv6]:port</source>
        <translation>Format : IPv4:port / [IPv6]:port</translation>
</message>
</context>
<context>
<name>authentication</name>
<message>
<location filename="../gui/login.ui" line="14"/>
<location filename="../gui/login.ui" line="47"/>
<source>Tracker authentication</source>
<translation>Authentification du tracker</translation>
</message>
<message>
<location filename="../gui/login.ui" line="64"/>
<source>Tracker:</source>
<translation>Tracker :</translation>
</message>
<message>
<location filename="../gui/login.ui" line="86"/>
<source>Login</source>
<translation>Authentification</translation>
</message>
<message>
<location filename="../gui/login.ui" line="94"/>
<source>Username:</source>
<translation>Nom d'utilisateur :</translation>
</message>
<message>
<location filename="../gui/login.ui" line="117"/>
<source>Password:</source>
<translation>Mot de passe :</translation>
</message>
<message>
<location filename="../gui/login.ui" line="154"/>
<source>Log in</source>
<translation>S'authentifier</translation>
</message>
<message>
<location filename="../gui/login.ui" line="161"/>
<source>Cancel</source>
<translation>Annuler</translation>
</message>
</context>
<context>
<name>confirmDeletionDlg</name>
<message>
<location filename="../gui/confirmdeletiondlg.ui" line="20"/>
<source>Deletion confirmation - qBittorrent</source>
<translation>Confirmation de la suppression – qBittorrent</translation>
</message>
<message>
<location filename="../gui/confirmdeletiondlg.ui" line="67"/>
<source>Remember choice</source>
<translation>Se souvenir du choix</translation>
</message>
<message>
<location filename="../gui/confirmdeletiondlg.ui" line="94"/>
<source>Also delete the files on the hard disk</source>
<translation>Supprimer également les fichiers sur le disque</translation>
</message>
</context>
<context>
<name>createTorrentDialog</name>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="308"/>
<source>Cancel</source>
<translation>Annuler</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="14"/>
<source>Torrent Creation Tool</source>
<translation>Utilitaire de création de torrent</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="38"/>
<source>Torrent file creation</source>
<translation>Création d'un fichier torrent</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="60"/>
<source>Add file</source>
<translation>Ajouter un fichier</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="67"/>
<source>Add folder</source>
<translation>Ajouter un dossier</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="48"/>
<source>File or folder to add to the torrent:</source>
<translation>Fichier ou dossier à ajouter au torrent :</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="78"/>
<source>Tracker URLs:</source>
<translation>URL des trackers :</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="88"/>
<source>Web seeds urls:</source>
<translation>URL des sources web :</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="98"/>
<source>Comment:</source>
<translation>Commentaire :</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="127"/>
<source>You can separate tracker tiers / groups with an empty line.</source>
<comment>A tracker tier is a group of trackers, consisting of a main tracker and its mirrors.</comment>
<translation>Vous pouvez séparer les niveaux / groupes du tracker par une ligne vide.</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="148"/>
<source>Piece size:</source>
<translation>Taille des morceaux :</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="165"/>
<source>16 KiB</source>
        <translation>16 Kio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="170"/>
<source>32 KiB</source>
<translation>32 Kio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="175"/>
<source>64 KiB</source>
<translation>64 Kio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="180"/>
<source>128 KiB</source>
<translation>128 Kio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="185"/>
<source>256 KiB</source>
<translation>256 Kio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="190"/>
<source>512 KiB</source>
<translation>512 Kio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="195"/>
<source>1 MiB</source>
<translation>1 Mio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="200"/>
<source>2 MiB</source>
<translation>2 Mio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="205"/>
<source>4 MiB</source>
<translation>4 Mio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="210"/>
<source>8 MiB</source>
        <translation>8 Mio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="215"/>
<source>16 MiB</source>
        <translation>16 Mio</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="223"/>
<source>Auto</source>
<translation>Automatique</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="248"/>
<source>Private (won't be distributed on DHT network if enabled)</source>
<translation>Privé (ne sera pas distribué sur le réseau DHT si activé)</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="255"/>
<source>Start seeding after creation</source>
<translation>Commencer le partage directement</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="265"/>
<source>Ignore share ratio limits for this torrent</source>
<translation>Ignorer les limites du ratio de partage pour ce torrent</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="301"/>
<source>Create and save...</source>
<translation>Créer et sauvegarder…</translation>
</message>
<message>
<location filename="../gui/torrentcreatordlg.ui" line="272"/>
<source>Progress:</source>
<translation>Progression :</translation>
</message>
</context>
<context>
<name>downloadFromURL</name>
<message>
<location filename="../gui/downloadfromurldlg.ui" line="28"/>
<source>Add torrent links</source>
<translation>Ajout de liens vers des torrents</translation>
</message>
<message>
<location filename="../gui/downloadfromurldlg.ui" line="58"/>
<source>One per line (HTTP links, Magnet links and info-hashes are supported)</source>
        <translation>Un par ligne (les liens HTTP, les liens magnet et les info-hachages sont pris en charge)</translation>
</message>
<message>
<location filename="../gui/downloadfromurldlg.ui" line="80"/>
<source>Download</source>
<translation>Télécharger</translation>
</message>
<message>
<location filename="../gui/downloadfromurldlg.ui" line="87"/>
<source>Cancel</source>
<translation>Annuler</translation>
</message>
<message>
<location filename="../gui/downloadfromurldlg.ui" line="14"/>
<source>Download from urls</source>
<translation>Téléchargement depuis des URL</translation>
</message>
<message>
<location filename="../gui/downloadfromurldlg.h" line="96"/>
<source>No URL entered</source>
<translation>Aucune URL entrée</translation>
</message>
<message>
<location filename="../gui/downloadfromurldlg.h" line="96"/>
<source>Please type at least one URL.</source>
<translation>Veuillez entrer au moins une URL.</translation>
</message>
</context>
<context>
<name>engineSelect</name>
<message>
<location filename="../searchengine/engineselect.ui" line="17"/>
<source>Search plugins</source>
<translation>Greffons de recherche</translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="30"/>
<source>Installed search engines:</source>
<translation>Moteurs de recherche installés :</translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="50"/>
<source>Name</source>
<translation>Nom</translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="55"/>
<source>Version</source>
        <translation>Version</translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="60"/>
<source>Url</source>
<translation>URL</translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="65"/>
<location filename="../searchengine/engineselect.ui" line="124"/>
<source>Enabled</source>
<translation>Activé</translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="83"/>
<source>You can get new search engine plugins here: <a href="http://plugins.qbittorrent.org">http://plugins.qbittorrent.org</a></source>
        <translation>Davantage de greffons de recherche ici : <a href="http://plugins.qbittorrent.org">http://plugins.qbittorrent.org</a></translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="98"/>
<source>Install a new one</source>
<translation>Installer un nouveau</translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="105"/>
<source>Check for updates</source>
<translation>Mettre à jour</translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="112"/>
<source>Close</source>
<translation>Fermer</translation>
</message>
<message>
<location filename="../searchengine/engineselect.ui" line="129"/>
<source>Uninstall</source>
<translation>Désinstaller</translation>
</message>
</context>
<context>
<name>engineSelectDlg</name>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="203"/>
<source>Uninstall warning</source>
        <translation>Avertissement de désinstallation</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="205"/>
<source>Uninstall success</source>
<translation>Désinstallation réussie</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="264"/>
<source>Invalid plugin</source>
        <translation>Greffon invalide</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="264"/>
<source>The search engine plugin is invalid, please contact the author.</source>
        <translation>Le greffon de moteur de recherche est invalide, veuillez contacter l'auteur.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="270"/>
<source>A more recent version of '%1' search engine plugin is already installed.</source>
<comment>%1 is the name of the search engine</comment>
        <translation>Une version plus récente du greffon de recherche « %1 » est déjà installée.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="295"/>
<source>'%1' search engine plugin could not be updated, keeping old version.</source>
<comment>%1 is the name of the search engine</comment>
        <translation>Le greffon de recherche « %1 » n'a pas pu être mis à jour, l'ancienne version est conservée.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="300"/>
<source>'%1' search engine plugin could not be installed.</source>
<comment>%1 is the name of the search engine</comment>
        <translation>Le greffon de recherche « %1 » n'a pas pu être installé.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="310"/>
<source>'%1' search engine plugin was successfully updated.</source>
<comment>%1 is the name of the search engine</comment>
        <translation>Le greffon de recherche « %1 » a été mis à jour avec succès.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="313"/>
<source>'%1' search engine plugin was successfully installed.</source>
<comment>%1 is the name of the search engine</comment>
        <translation>Le greffon de recherche « %1 » a été installé avec succès.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="381"/>
<source>The link doesn't seem to point to a search engine plugin.</source>
        <translation>Le lien ne semble pas pointer vers un greffon de recherche.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="396"/>
<source>Select search plugins</source>
<translation>Sélectionnez les greffons</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="517"/>
<source>Sorry, '%1' search plugin installation failed.</source>
<comment>%1 is the name of the search engine</comment>
        <translation>Désolé, l'installation du greffon de recherche « %1 » a échoué.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="270"/>
<location filename="../searchengine/engineselectdlg.cpp" line="295"/>
<location filename="../searchengine/engineselectdlg.cpp" line="300"/>
<location filename="../searchengine/engineselectdlg.cpp" line="310"/>
<location filename="../searchengine/engineselectdlg.cpp" line="313"/>
<source>Search plugin install</source>
<translation>Installation d'un greffon de recherche</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="146"/>
<location filename="../searchengine/engineselectdlg.cpp" line="217"/>
<location filename="../searchengine/engineselectdlg.cpp" line="333"/>
<source>Yes</source>
<translation>Oui</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="149"/>
<location filename="../searchengine/engineselectdlg.cpp" line="183"/>
<location filename="../searchengine/engineselectdlg.cpp" line="220"/>
<location filename="../searchengine/engineselectdlg.cpp" line="336"/>
<source>No</source>
<translation>Non</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="397"/>
<source>qBittorrent search plugin</source>
        <translation>Greffon de recherche qBittorrent</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="448"/>
<location filename="../searchengine/engineselectdlg.cpp" line="489"/>
<location filename="../searchengine/engineselectdlg.cpp" line="510"/>
<location filename="../searchengine/engineselectdlg.cpp" line="517"/>
<source>Search plugin update</source>
<translation>Mise à jour du greffon de recherche</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="489"/>
<location filename="../searchengine/engineselectdlg.cpp" line="510"/>
<source>Sorry, update server is temporarily unavailable.</source>
<translation>Désolé, le serveur de mise à jour est temporairement indisponible.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="448"/>
<source>All your plugins are already up to date.</source>
<translation>Tous vos greffons de recherche sont déjà à jour.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="205"/>
<source>All selected plugins were uninstalled successfully</source>
<translation>Tous les greffons sélectionnés ont été désinstallés avec succès</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="203"/>
<source>Some plugins could not be uninstalled because they are included in qBittorrent. Only the ones you added yourself can be uninstalled.
Those plugins were disabled.</source>
        <translation>Certains greffons n'ont pas pu être désinstallés car ils sont inclus dans qBittorrent. Seuls ceux que vous avez ajoutés vous-même peuvent être désinstallés.
Ces greffons ont été désactivés.</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="381"/>
<source>Invalid link</source>
<translation>Lien invalide</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="373"/>
<location filename="../searchengine/engineselectdlg.cpp" line="382"/>
<source>New search engine plugin URL</source>
<translation>Adresse du nouveau greffon de recherche</translation>
</message>
<message>
<location filename="../searchengine/engineselectdlg.cpp" line="374"/>
<location filename="../searchengine/engineselectdlg.cpp" line="383"/>
<source>URL:</source>
<translation>Adresse :</translation>
</message>
</context>
<context>
<name>errorDialog</name>
<message>
<location filename="../app/stacktrace_win_dlg.ui" line="14"/>
<source>Crash info</source>
<translation>Information de plantage</translation>
</message>
</context>
<context>
<name>fsutils</name>
<message>
<location filename="../core/utils/fs.cpp" line="444"/>
<location filename="../core/utils/fs.cpp" line="451"/>
<location filename="../core/utils/fs.cpp" line="461"/>
<location filename="../core/utils/fs.cpp" line="494"/>
<location filename="../core/utils/fs.cpp" line="506"/>
<source>Downloads</source>
<translation>Téléchargements</translation>
</message>
</context>
<context>
<name>misc</name>
<message>
<location filename="../core/utils/misc.cpp" line="82"/>
<source>B</source>
<comment>bytes</comment>
<translation>o</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="83"/>
<source>KiB</source>
<comment>kibibytes (1024 bytes)</comment>
<translation>Kio</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="84"/>
<source>MiB</source>
<comment>mebibytes (1024 kibibytes)</comment>
<translation>Mio</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="85"/>
<source>GiB</source>
<comment>gibibytes (1024 mibibytes)</comment>
<translation>Gio</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="86"/>
<source>TiB</source>
<comment>tebibytes (1024 gibibytes)</comment>
<translation>Tio</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="283"/>
<source>Python not detected</source>
        <translation>Python non détecté</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="311"/>
<source>Python version: %1</source>
        <translation>Version de Python : %1</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="338"/>
<source>/s</source>
<comment>per second</comment>
<translation>/s</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="426"/>
<source>%1h %2m</source>
<comment>e.g: 3hours 5minutes</comment>
<translation>%1h %2m</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="430"/>
<source>%1d %2h</source>
<comment>e.g: 2days 10hours</comment>
<translation>%1j %2h</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="326"/>
<source>Unknown</source>
<comment>Unknown (size)</comment>
<translation>Inconnue</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="206"/>
<source>qBittorrent will shutdown the computer now because all downloads are complete.</source>
<translation>qBittorrent va maintenant éteindre l'ordinateur car tous les téléchargements sont terminés.</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="419"/>
<source>< 1m</source>
<comment>< 1 minute</comment>
<translation>< 1min</translation>
</message>
<message>
<location filename="../core/utils/misc.cpp" line="422"/>
<source>%1m</source>
<comment>e.g: 10minutes</comment>
<translation>%1min</translation>
</message>
<message>
<location filename="../webui/btjson.cpp" line="388"/>
<source>Working</source>
<translation>Fonctionne</translation>
</message>
<message>
<location filename="../webui/btjson.cpp" line="386"/>
<source>Updating...</source>
<translation>Mise à jour…</translation>
</message>
<message>
<location filename="../webui/btjson.cpp" line="390"/>
<source>Not working</source>
<translation>Ne fonctionne pas</translation>
</message>
<message>
<location filename="../webui/btjson.cpp" line="384"/>
<source>Not contacted yet</source>
<translation>Pas encore contacté</translation>
</message>
</context>
<context>
<name>options_imp</name>
<message>
<location filename="../gui/options_imp.cpp" line="1249"/>
<location filename="../gui/options_imp.cpp" line="1251"/>
<source>Choose export directory</source>
<translation>Choisir un dossier pour l'export</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1289"/>
<location filename="../gui/options_imp.cpp" line="1291"/>
<location filename="../gui/options_imp.cpp" line="1302"/>
<location filename="../gui/options_imp.cpp" line="1304"/>
<source>Choose a save directory</source>
<translation>Choisir un répertoire de sauvegarde</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1202"/>
<source>Add directory to scan</source>
<translation>Ajouter un dossier à surveiller</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="185"/>
<source>Supported parameters (case sensitive):</source>
        <translation>Paramètres pris en charge (sensibles à la casse) :</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="186"/>
<source>%N: Torrent name</source>
        <translation>%N : Nom du torrent</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="187"/>
<source>%L: Label</source>
        <translation>%L : Catégorie</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="188"/>
<source>%F: Content path (same as root path for multifile torrent)</source>
        <translation>%F : Chemin du contenu (identique au chemin racine pour les torrents multi-fichiers)</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="189"/>
<source>%R: Root path (first torrent subdirectory path)</source>
        <translation>%R : Chemin racine (chemin du premier sous-répertoire du torrent)</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="190"/>
<source>%D: Save path</source>
        <translation>%D : Chemin de sauvegarde</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="191"/>
<source>%C: Number of files</source>
        <translation>%C : Nombre de fichiers</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="192"/>
<source>%Z: Torrent size (bytes)</source>
        <translation>%Z : Taille du torrent (octets)</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="193"/>
<source>%T: Current tracker</source>
        <translation>%T : Tracker actuel</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="194"/>
<source>%I: Info hash</source>
        <translation>%I : Info hash</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1209"/>
<source>Folder is already being watched.</source>
<translation>Ce dossier est déjà surveillé.</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1212"/>
<source>Folder does not exist.</source>
<translation>Ce dossier n'existe pas.</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1215"/>
<source>Folder is not readable.</source>
<translation>Ce dossier n'est pas accessible en lecture.</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1225"/>
<source>Failure</source>
<translation>Échec</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1225"/>
<source>Failed to add Scan Folder '%1': %2</source>
<translation>Impossible d'ajouter le dossier surveillé « %1 » : %2</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1275"/>
<location filename="../gui/options_imp.cpp" line="1277"/>
<source>Filters</source>
<translation>Filtres</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1275"/>
<location filename="../gui/options_imp.cpp" line="1277"/>
<source>Choose an IP filter file</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1344"/>
<source>SSL Certificate</source>
<translation>Certificat SSL</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1356"/>
<source>SSL Key</source>
<translation>Clé SSL</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1389"/>
<source>Parsing error</source>
<translation>Erreur de traitement</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1389"/>
<source>Failed to parse the provided IP filter</source>
<translation>Impossible de charger le filtre IP fourni</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1391"/>
<source>Successfully refreshed</source>
<translation>Correctement rechargé</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1391"/>
<source>Successfully parsed the provided IP filter: %1 rules were applied.</source>
<comment>%1 is a number</comment>
<translation>Le filtre IP a été correctement chargé : %1 règles ont été appliquées.</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1479"/>
<source>Invalid key</source>
<translation>Clé invalide</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1479"/>
<source>This is not a valid SSL key.</source>
<translation>Ceci n'est pas une clé SSL valide.</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1495"/>
<source>Invalid certificate</source>
<translation>Certificat invalide</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1495"/>
<source>This is not a valid SSL certificate.</source>
<translation>Ceci n'est pas un certificat SSL valide.</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1505"/>
<source>The start time and the end time can't be the same.</source>
<translation>Les heures de début et de fin ne peuvent être les mêmes.</translation>
</message>
<message>
<location filename="../gui/options_imp.cpp" line="1508"/>
<source>Time Error</source>
<translation>Erreur de temps</translation>
</message>
</context>
<context>
<name>pluginSourceDlg</name>
<message>
<location filename="../searchengine/pluginsource.ui" line="13"/>
<source>Plugin source</source>
<translation>Source du greffon</translation>
</message>
<message>
<location filename="../searchengine/pluginsource.ui" line="26"/>
<source>Search plugin source:</source>
<translation>Source du greffon de recherche :</translation>
</message>
<message>
<location filename="../searchengine/pluginsource.ui" line="35"/>
<source>Local file</source>
<translation>Fichier local</translation>
</message>
<message>
<location filename="../searchengine/pluginsource.ui" line="42"/>
<source>Web link</source>
<translation>Lien web</translation>
</message>
</context>
<context>
<name>preview</name>
<message>
<location filename="../gui/preview.ui" line="14"/>
<source>Preview selection</source>
<translation>Sélection du fichier à prévisualiser</translation>
</message>
<message>
<location filename="../gui/preview.ui" line="26"/>
<source>The following files support previewing, please select one of them:</source>
<translation>Les fichiers suivants prennent en charge la prévisualisation, sélectionnez-en un :</translation>
</message>
<message>
<location filename="../gui/preview.ui" line="61"/>
<source>Preview</source>
<translation>Prévisualiser</translation>
</message>
<message>
<location filename="../gui/preview.ui" line="68"/>
<source>Cancel</source>
<translation>Annuler</translation>
</message>
</context>
<context>
<name>search_engine</name>
<message>
<location filename="../searchengine/search.ui" line="14"/>
<location filename="../searchengine/search.ui" line="28"/>
<source>Search</source>
<translation>Recherche</translation>
</message>
<message>
<location filename="../searchengine/search.ui" line="51"/>
<source>Status:</source>
<translation>Statut :</translation>
</message>
<message>
<location filename="../searchengine/search.ui" line="75"/>
<source>Stopped</source>
<translation>Arrêtée</translation>
</message>
<message>
<location filename="../searchengine/search.ui" line="107"/>
<source>Download</source>
<translation>Télécharger</translation>
</message>
<message>
<location filename="../searchengine/search.ui" line="117"/>
<source>Go to description page</source>
<translation>Aller à la page de description</translation>
</message>
<message>
<location filename="../searchengine/search.ui" line="127"/>
<source>Copy description page URL</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../searchengine/search.ui" line="147"/>
<source>Search engines...</source>
<translation>Moteurs de recherche…</translation>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>test_utils_io_list.py<|end_file_name|><|fim▁begin|>import sys, os
sys.path.append(os.path.abspath("../utils/"))
from utils_io_list import *
from test_utils_io_folder import *
def test_generate_pairs_for_each_folder():
    images_folder_path = "folder/path/example"
num_of_frames = 2
pairs = generate_pairs_for_each_folder(images_folder_path, num_of_frames)
expected_pair = [("example", 0), ("example", 1)]
if expected_pair == pairs:
return True
else:
return False
def test_generate_num_of_frames_list():
folders_paths_list = ['../temp_folder_1', '../temp_folder_2']
for folder_path in folders_paths_list:
create_folder(folder_path)
create_dummy_files_in_folder(folder_path)
num_of_frames_list = generate_num_of_frames_list(folders_paths_list)
for folder_path in folders_paths_list:
shutil.rmtree(folder_path)
expected_list = [10, 10]
if expected_list == num_of_frames_list:
return True
else:
return False
def test_generate_pairs_with_two_lists():
folders_paths_list = ['../temp_folder_1', '../temp_folder_2']
num_of_frames_list = [1, 2]
pairs_list = generate_pairs_with_two_lists(folders_paths_list, num_of_frames_list)
expected_list = [('temp_folder_1', 0), ('temp_folder_2', 0), ('temp_folder_2', 1)]
if expected_list == pairs_list:
return True
else:
return False
def test_generate_pairs_list_for_training():
dataset_folder_path = '/home/ngh/dev/ROLO-dev/benchmark/ILSVRC2015/Data/VID/train/ILSVRC2015_VID_train_0000/'
output_folder_path = '/home/ngh/dev/ROLO-TRACK/training_list/'
create_folder(output_folder_path)
txt_file_path = os.path.join(output_folder_path, 'list_0.txt')
numpy_file_path = os.path.join(output_folder_path, 'list_0')
finished = generate_pairs_list_for_training(dataset_folder_path, numpy_file_path, txt_file_path)
if finished is True:
return True
else:
return False
def main():
print("Testing: utils_io_list")
passed = test_generate_num_of_frames_list()
if passed is False:
print("test_generate_num_of_frames_list failed")
passed = test_generate_pairs_for_each_folder()
if passed is False:
print("test_generate_pairs_for_each_folder failed")
passed = test_generate_pairs_with_two_lists()
if passed is False:
print("test_generate_pairs_with_two_lists failed")
passed = test_generate_pairs_list_for_training()
if passed is False:
print("test_generate_pairs_list_for_training failed")
if __name__ == "__main__":<|fim▁hole|><|fim▁end|> | main() |
<|file_name|>TopologyGuiceModule.java<|end_file_name|><|fim▁begin|>/*
* ToroDB
* Copyright © 2014 8Kdata Technology (www.8kdata.com)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.torodb.mongodb.repl.topology;
import com.google.inject.Exposed;
import com.google.inject.PrivateModule;
import com.google.inject.Provides;
import com.torodb.core.concurrent.ConcurrentToolsFactory;
import com.torodb.core.supervision.Supervisor;
import com.torodb.mongodb.repl.SyncSourceProvider;
import com.torodb.mongodb.repl.guice.MongoDbRepl;
import java.time.Clock;
import java.time.Duration;
import java.util.concurrent.ThreadFactory;
import javax.inject.Singleton;
/**
*
*/
public class TopologyGuiceModule extends PrivateModule {
@Override
protected void configure() {
bind(HeartbeatNetworkHandler.class)
.to(MongoClientHeartbeatNetworkHandler.class)
.in(Singleton.class);
bind(SyncSourceProvider.class)
.to(RetrierTopologySyncSourceProvider.class)
.in(Singleton.class);
expose(SyncSourceProvider.class);
bind(TopologyErrorHandler.class)
.to(DefaultTopologyErrorHandler.class)
.in(Singleton.class);
bind(SyncSourceRetrier.class)
.in(Singleton.class);
bind(TopologyHeartbeatHandler.class)
.in(Singleton.class);
bind(TopologySyncSourceProvider.class)
.in(Singleton.class);
}
@Provides
@Topology
Supervisor getTopologySupervisor(@MongoDbRepl Supervisor replSupervisor) {
return replSupervisor;
}
@Provides
@Singleton
@Exposed
public TopologyService createTopologyService(ThreadFactory threadFactory,
TopologyHeartbeatHandler heartbeatHandler, TopologyExecutor executor,
Clock clock) {
return new TopologyService(heartbeatHandler, threadFactory, executor, clock);
}
@Provides
@Singleton<|fim▁hole|> TopologyExecutor createTopologyExecutor(
ConcurrentToolsFactory concurrentToolsFactory) {
//TODO: Being able to configure max sync source lag and replication delay
return new TopologyExecutor(concurrentToolsFactory, Duration.ofMinutes(1),
Duration.ZERO);
}
}<|fim▁end|> | |
<|file_name|>transaction.go<|end_file_name|><|fim▁begin|>package transaction
import (
. "DNA/common"
"DNA/common/serialization"
"DNA/core/contract"
"DNA/core/contract/program"
sig "DNA/core/signature"
"DNA/core/transaction/payload"
. "DNA/errors"
"crypto/sha256"
"errors"
"fmt"
"io"
"sort"
)
//for different transaction types with different payload format
//and transaction process methods
type TransactionType byte
const (
BookKeeping TransactionType = 0x00
IssueAsset TransactionType = 0x01
BookKeeper TransactionType = 0x02
PrivacyPayload TransactionType = 0x20
RegisterAsset TransactionType = 0x40
TransferAsset TransactionType = 0x80
Record TransactionType = 0x81
DeployCode TransactionType = 0xd0
DataFile TransactionType = 0x12
)
//Payload define the func for loading the payload data
//base on payload type which have different struture
type Payload interface {
// Get payload data
Data(version byte) []byte
//Serialize payload data
Serialize(w io.Writer, version byte) error
Deserialize(r io.Reader, version byte) error
}
//Transaction is used for carry information or action to Ledger
//validated transaction will be added to block and updates state correspondingly
var TxStore ILedgerStore
type Transaction struct {
TxType TransactionType
PayloadVersion byte
Payload Payload
Attributes []*TxAttribute
UTXOInputs []*UTXOTxInput
BalanceInputs []*BalanceTxInput
Outputs []*TxOutput
Programs []*program.Program
//Inputs/Outputs map base on Asset (needn't serialize)
AssetOutputs map[Uint256][]*TxOutput
AssetInputAmount map[Uint256]Fixed64
AssetOutputAmount map[Uint256]Fixed64
hash *Uint256
}
//Serialize the Transaction
func (tx *Transaction) Serialize(w io.Writer) error {
err := tx.SerializeUnsigned(w)
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction txSerializeUnsigned Serialize failed.")
}
//Serialize Transaction's programs
lens := uint64(len(tx.Programs))
err = serialization.WriteVarUint(w, lens)
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction WriteVarUint failed.")
}
if lens > 0 {
for _, p := range tx.Programs {
err = p.Serialize(w)
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction Programs Serialize failed.")
}
}
}
return nil
}
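// Illustrative usage sketch (not part of the original source): writing a fully
// populated transaction into an in-memory buffer. The variable names are
// hypothetical and the caller would need to import "bytes".
//
//	var buf bytes.Buffer
//	if err := tx.Serialize(&buf); err != nil {
//		// handle the serialization failure
//	}
//	raw := buf.Bytes() // wire-format bytes of the transaction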
//Serialize the Transaction data without contracts
func (tx *Transaction) SerializeUnsigned(w io.Writer) error {
//txType
w.Write([]byte{byte(tx.TxType)})
//PayloadVersion
w.Write([]byte{tx.PayloadVersion})
//Payload
if tx.Payload == nil {
return errors.New("Transaction Payload is nil.")
}
tx.Payload.Serialize(w, tx.PayloadVersion)
//[]*txAttribute
err := serialization.WriteVarUint(w, uint64(len(tx.Attributes)))
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction item txAttribute length serialization failed.")
}
if len(tx.Attributes) > 0 {
for _, attr := range tx.Attributes {
attr.Serialize(w)
}
}
//[]*UTXOInputs
err = serialization.WriteVarUint(w, uint64(len(tx.UTXOInputs)))
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction item UTXOInputs length serialization failed.")
}
if len(tx.UTXOInputs) > 0 {
for _, utxo := range tx.UTXOInputs {
utxo.Serialize(w)
}
}
// TODO BalanceInputs
//[]*Outputs
err = serialization.WriteVarUint(w, uint64(len(tx.Outputs)))
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction item Outputs length serialization failed.")
}
if len(tx.Outputs) > 0 {
for _, output := range tx.Outputs {
output.Serialize(w)
}
}
return nil
}
//deserialize the Transaction
func (tx *Transaction) Deserialize(r io.Reader) error {
// tx deserialize
err := tx.DeserializeUnsigned(r)
if err != nil {
return NewDetailErr(err, ErrNoCode, "transaction Deserialize error")
}
// tx program
lens, err := serialization.ReadVarUint(r, 0)
if err != nil {
return NewDetailErr(err, ErrNoCode, "transaction tx program Deserialize error")
}
programHashes := []*program.Program{}
if lens > 0 {
for i := 0; i < int(lens); i++ {
outputHashes := new(program.Program)
outputHashes.Deserialize(r)
programHashes = append(programHashes, outputHashes)
}
tx.Programs = programHashes
}
return nil
}
func (tx *Transaction) DeserializeUnsigned(r io.Reader) error {
var txType [1]byte
_, err := io.ReadFull(r, txType[:])
if err != nil {
return err
}
tx.TxType = TransactionType(txType[0])
return tx.DeserializeUnsignedWithoutType(r)
}
func (tx *Transaction) DeserializeUnsignedWithoutType(r io.Reader) error {
var payloadVersion [1]byte
_, err := io.ReadFull(r, payloadVersion[:])
tx.PayloadVersion = payloadVersion[0]
if err != nil {
return err
}
//payload
//tx.Payload.Deserialize(r)
switch tx.TxType {
case RegisterAsset:
tx.Payload = new(payload.RegisterAsset)
case IssueAsset:
tx.Payload = new(payload.IssueAsset)
case TransferAsset:
tx.Payload = new(payload.TransferAsset)
case BookKeeping:
tx.Payload = new(payload.BookKeeping)
case Record:
tx.Payload = new(payload.Record)
case BookKeeper:
tx.Payload = new(payload.BookKeeper)
case PrivacyPayload:
tx.Payload = new(payload.PrivacyPayload)
case DataFile:
tx.Payload = new(payload.DataFile)
default:
return errors.New("[Transaction],invalide transaction type.")
}
err = tx.Payload.Deserialize(r, tx.PayloadVersion)
if err != nil {
return NewDetailErr(err, ErrNoCode, "Payload Parse error")
}
//attributes
Len, err := serialization.ReadVarUint(r, 0)
if err != nil {
return err
}
if Len > uint64(0) {
for i := uint64(0); i < Len; i++ {
attr := new(TxAttribute)
err = attr.Deserialize(r)
if err != nil {
return err
}
tx.Attributes = append(tx.Attributes, attr)
}
}
//UTXOInputs
Len, err = serialization.ReadVarUint(r, 0)
if err != nil {
return err
}
if Len > uint64(0) {
for i := uint64(0); i < Len; i++ {
utxo := new(UTXOTxInput)
err = utxo.Deserialize(r)
if err != nil {
return err
}
tx.UTXOInputs = append(tx.UTXOInputs, utxo)
}
}
//TODO balanceInputs
//Outputs
Len, err = serialization.ReadVarUint(r, 0)
if err != nil {
return err
}
if Len > uint64(0) {
for i := uint64(0); i < Len; i++ {
output := new(TxOutput)
output.Deserialize(r)
tx.Outputs = append(tx.Outputs, output)
}
}
return nil
}
func (tx *Transaction) GetProgramHashes() ([]Uint160, error) {
if tx == nil {
return []Uint160{}, errors.New("[Transaction],GetProgramHashes transaction is nil.")
}
hashs := []Uint160{}
uniqHashes := []Uint160{}
// add inputUTXO's transaction
referenceWithUTXO_Output, err := tx.GetReference()
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes failed.")
}
for _, output := range referenceWithUTXO_Output {
programHash := output.ProgramHash
hashs = append(hashs, programHash)
}
for _, attribute := range tx.Attributes {
if attribute.Usage == Script {
dataHash, err := Uint160ParseFromBytes(attribute.Data)
if err != nil {
return nil, NewDetailErr(errors.New("[Transaction], GetProgramHashes err."), ErrNoCode, "")
}
hashs = append(hashs, Uint160(dataHash))
}
}
switch tx.TxType {
case RegisterAsset:
issuer := tx.Payload.(*payload.RegisterAsset).Issuer
signatureRedeemScript, err := contract.CreateSignatureRedeemScript(issuer)
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes CreateSignatureRedeemScript failed.")
}
astHash, err := ToCodeHash(signatureRedeemScript)
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes ToCodeHash failed.")
}
hashs = append(hashs, astHash)
case IssueAsset:
result := tx.GetMergedAssetIDValueFromOutputs()
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetTransactionResults failed.")
}
for k := range result {
tx, err := TxStore.GetTransaction(k)
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, fmt.Sprintf("[Transaction], GetTransaction failed With AssetID:=%x", k))
}
if tx.TxType != RegisterAsset {
				return nil, NewDetailErr(errors.New("[Transaction] error"), ErrNoCode, fmt.Sprintf("[Transaction], Transaction Type is illegal With AssetID:=%x", k))
}
switch v1 := tx.Payload.(type) {
case *payload.RegisterAsset:
hashs = append(hashs, v1.Controller)
default:
				return nil, NewDetailErr(errors.New("[Transaction] error"), ErrNoCode, fmt.Sprintf("[Transaction], payload is illegal with AssetID:=%x", k))
}
}
case DataFile:<|fim▁hole|> if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes CreateSignatureRedeemScript failed.")
}
astHash, err := ToCodeHash(signatureRedeemScript)
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes ToCodeHash failed.")
}
hashs = append(hashs, astHash)
case TransferAsset:
case Record:
case BookKeeper:
issuer := tx.Payload.(*payload.BookKeeper).Issuer
signatureRedeemScript, err := contract.CreateSignatureRedeemScript(issuer)
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction - BookKeeper], GetProgramHashes CreateSignatureRedeemScript failed.")
}
astHash, err := ToCodeHash(signatureRedeemScript)
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction - BookKeeper], GetProgramHashes ToCodeHash failed.")
}
hashs = append(hashs, astHash)
case PrivacyPayload:
issuer := tx.Payload.(*payload.PrivacyPayload).EncryptAttr.(*payload.EcdhAes256).FromPubkey
signatureRedeemScript, err := contract.CreateSignatureRedeemScript(issuer)
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes CreateSignatureRedeemScript failed.")
}
astHash, err := ToCodeHash(signatureRedeemScript)
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes ToCodeHash failed.")
}
hashs = append(hashs, astHash)
default:
}
//remove dupilicated hashes
uniq := make(map[Uint160]bool)
for _, v := range hashs {
uniq[v] = true
}
for k := range uniq {
uniqHashes = append(uniqHashes, k)
}
sort.Sort(byProgramHashes(uniqHashes))
return uniqHashes, nil
}
func (tx *Transaction) SetPrograms(programs []*program.Program) {
tx.Programs = programs
}
func (tx *Transaction) GetPrograms() []*program.Program {
return tx.Programs
}
func (tx *Transaction) GetOutputHashes() ([]Uint160, error) {
//TODO: implement Transaction.GetOutputHashes()
return []Uint160{}, nil
}
func (tx *Transaction) GenerateAssetMaps() {
//TODO: implement Transaction.GenerateAssetMaps()
}
func (tx *Transaction) GetMessage() []byte {
return sig.GetHashData(tx)
}
func (tx *Transaction) Hash() Uint256 {
if tx.hash == nil {
d := sig.GetHashData(tx)
temp := sha256.Sum256([]byte(d))
f := Uint256(sha256.Sum256(temp[:]))
tx.hash = &f
}
return *tx.hash
}
func (tx *Transaction) SetHash(hash Uint256) {
tx.hash = &hash
}
func (tx *Transaction) Type() InventoryType {
return TRANSACTION
}
func (tx *Transaction) Verify() error {
//TODO: Verify()
return nil
}
func (tx *Transaction) GetReference() (map[*UTXOTxInput]*TxOutput, error) {
if tx.TxType == RegisterAsset {
return nil, nil
}
//UTXO input / Outputs
reference := make(map[*UTXOTxInput]*TxOutput)
// Key index,v UTXOInput
for _, utxo := range tx.UTXOInputs {
transaction, err := TxStore.GetTransaction(utxo.ReferTxID)
if err != nil {
return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetReference failed.")
}
index := utxo.ReferTxOutputIndex
reference[utxo] = transaction.Outputs[index]
}
return reference, nil
}
func (tx *Transaction) GetTransactionResults() (TransactionResult, error) {
result := make(map[Uint256]Fixed64)
outputResult := tx.GetMergedAssetIDValueFromOutputs()
InputResult, err := tx.GetMergedAssetIDValueFromReference()
if err != nil {
return nil, err
}
//calc the balance of input vs output
for outputAssetid, outputValue := range outputResult {
if inputValue, ok := InputResult[outputAssetid]; ok {
result[outputAssetid] = inputValue - outputValue
} else {
result[outputAssetid] -= outputValue
}
}
for inputAssetid, inputValue := range InputResult {
if _, exist := result[inputAssetid]; !exist {
result[inputAssetid] += inputValue
}
}
return result, nil
}
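// Worked example (illustrative, not from the original source): if the inputs
// referenced by this transaction carry {assetA: 10} and its outputs carry
// {assetA: 7}, the map returned by GetTransactionResults above is {assetA: 3};
// an asset that appears only in the outputs ends up with a negative balance,
// and an input-only asset keeps its full input value.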
func (tx *Transaction) GetMergedAssetIDValueFromOutputs() TransactionResult {
var result = make(map[Uint256]Fixed64)
for _, v := range tx.Outputs {
amout, ok := result[v.AssetID]
if ok {
result[v.AssetID] = amout + v.Value
} else {
result[v.AssetID] = v.Value
}
}
return result
}
func (tx *Transaction) GetMergedAssetIDValueFromReference() (TransactionResult, error) {
reference, err := tx.GetReference()
if err != nil {
return nil, err
}
var result = make(map[Uint256]Fixed64)
for _, v := range reference {
amout, ok := result[v.AssetID]
if ok {
result[v.AssetID] = amout + v.Value
} else {
result[v.AssetID] = v.Value
}
}
return result, nil
}
type byProgramHashes []Uint160
func (a byProgramHashes) Len() int { return len(a) }
func (a byProgramHashes) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a byProgramHashes) Less(i, j int) bool {
if a[i].CompareTo(a[j]) > 0 {
return false
} else {
return true
}
}<|fim▁end|> | issuer := tx.Payload.(*payload.DataFile).Issuer
signatureRedeemScript, err := contract.CreateSignatureRedeemScript(issuer) |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>$(function()
{
$("#content").focus(function()
{
$(this).animate({"height": "85px",}, "fast" );
$("#button_block").slideDown("fast");
return false;
});
$("#cancel").click(function()
{
$("#content").animate({"height": "30px",}, "fast" );
$("#button_block").slideUp("fast");
return false;
});
function dragDrop(drag,i,j){
$(drag+i).draggable({
helper: 'clone',
revert : function(event, ui) {
// on older version of jQuery use "draggable"
// $(this).data("draggable")
// on 2.x versions of jQuery use "ui-draggable"
// $(this).data("ui-draggable")
$(this).data("uiDraggable").originalPosition = {
top : 0,
left : 0
};
// return boolean
return !event;
// that evaluate like this:
// return event !== false ? false : true;
}
});
if(j==0){
$('#dropzone'+j).droppable({
tolerance: 'touch',
activeClass: 'ui-state-default',
hoverClass: 'ui-state-hover',
drop: function(event, ui) {
var id = ui.draggable.attr("id");
var res = id.substr(9,9);
var post = parseInt(res);
text = $('div#contenttext'+post).text();
// alert(id);
$('#text').val(text);
$.ajax({
type: "POST",
url: "index.php",
data: text,
success:function(data){
$( ".tw-posts" ).prepend("<div class='tw-update'> <div class='post-container2'>"+text+"</div></div>");
$( "#button" ).trigger( "click" );
},
error:function (xhr, ajaxOptions, thrownError){
alert(thrownError); //throw any errors
}
});
console.log(text);
}
});
} else {
$('#dropzone'+j).droppable({
tolerance: 'touch',
activeClass: 'ui-state-default',
hoverClass: 'ui-state-hover'+j,
drop: function(event, ui) {
var id = ui.draggable.attr("id");
var res = id.substr(9,9);
var post = parseInt(res);
text = $('div#contenttext'+post).text();
// alert(id);
$('#text').val(text);
$.ajax({
type: "POST",
url: "index.php",
data: text,
success:function(data){
$( ".tw-posts" ).prepend("<div class='tw-update'> <div class='post-container2'>"+text+"</div></div>");
$( "#button" ).trigger( "click" );
},
error:function (xhr, ajaxOptions, thrownError){
alert(thrownError); //throw any errors<|fim▁hole|>
}
});
}
}
for (i=0;i<10; i++)
{
dragDrop("#draggable",i,0);
dragDrop("#draggabletw",i,1);
}
$('#publish').click(
function(){
$.ajax({
type: "POST",
url: "index.php",
data: $("#form2").serialize(), // serializes the form's elements.
beforeSend: function(){
},
success: function(data)
{
alert(data); // show response from the php script.
}
});
}
);
/*$("#form2").submit(function() {
var url = "publish.php"; // the script where you handle the form input.
$.ajax({
type: "POST",
url: url,
data: $("#form2").serialize(), // serializes the form's elements.
success: function(data)
{
alert(data); // show response from the php script.
}
});
return false; // avoid to execute the actual submit of the form.
});*/
});<|fim▁end|> | }
});
console.log(text); |
<|file_name|>validate.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# ***** BEGIN GPL LICENSE BLOCK *****
#
# --------------------------------------------------------------------------
# Blender 2.5 Extensions Framework
# --------------------------------------------------------------------------
#
# Authors:
# Doug Hammond
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
# ***** END GPL LICENCE BLOCK *****
#
"""
Pure logic and validation class.
By using a Subject object, and a dict of described logic tests, it
is possible to arrive at a True or False result for various purposes:
1. Data validation
2. UI control visibility
A Subject can be any object whose members are readable with getattr() :
class Subject(object):
a = 0
b = 1
c = 'foo'
d = True
e = False
f = 8
g = 'bar'
Tests are described thus:
Use the special list types Logic_AND and Logic_OR to describe
combinations of values and other members. Use Logic_Operator for
numerical comparison.
With regards to Subject, each of these evaluate to True:<|fim▁hole|> 'f': Logic_AND([8, {'b': 1}]),
'e': {'b': Logic_Operator({'gte':1, 'lt':3}) },
'g': Logic_OR([ 'baz', Logic_AND([{'b': 1}, {'f': 8}]) ])
}
With regards to Subject, each of these evaluate to False:
TESTB = {
'a': 'foo',
'c': Logic_OR([ 'bar', 'baz' ]),
'd': Logic_AND([ True, 'foo' ]),
'f': Logic_AND([9, {'b': 1}]),
'e': {'b': Logic_Operator({'gte':-10, 'lt': 1}) },
'g': Logic_OR([ 'baz', Logic_AND([{'b':0}, {'f': 8}]) ])
}
With regards to Subject, this test is invalid
TESTC = {
'n': 0
}
Tests are executed thus:
S = Subject()
L = Logician(S)
L.execute(TESTA)
"""
class Logic_AND(list):
pass
class Logic_OR(list):
pass
class Logic_Operator(dict):
pass
class Logician(object):
"""Given a subject and a dict that describes tests to perform on
its members, this class will evaluate True or False results for
each member/test pair. See the examples below for test syntax.
"""
subject = None
def __init__(self, subject):
self.subject = subject
def get_member(self, member_name):
"""Get a member value from the subject object. Raise exception
if subject is None or member not found.
"""
if self.subject is None:
raise Exception('Cannot run tests on a subject which is None')
return getattr(self.subject, member_name)
def test_logic(self, member, logic, operator='eq'):
"""Find the type of test to run on member, and perform that test"""
if type(logic) is dict:
return self.test_dict(member, logic)
elif type(logic) is Logic_AND:
return self.test_and(member, logic)
elif type(logic) is Logic_OR:
return self.test_or(member, logic)
elif type(logic) is Logic_Operator:
return self.test_operator(member, logic)
else:
# compare the value, I think using Logic_Operator() here
# allows completeness in test_operator(), but I can't put
# my finger on why for the minute
return self.test_operator(member,
Logic_Operator({operator: logic}))
def test_operator(self, member, value):
"""Execute the operators contained within value and expect that
ALL operators are True
"""
# something in this method is incomplete, what if operand is
# a dict, Logic_AND, Logic_OR or another Logic_Operator ?
# Do those constructs even make any sense ?
result = True
for operator, operand in value.items():
operator = operator.lower().strip()
if operator in ['eq', '==']:
result &= member==operand
if operator in ['not', '!=']:
result &= member!=operand
if operator in ['lt', '<']:
result &= member<operand
if operator in ['lte', '<=']:
result &= member<=operand
if operator in ['gt', '>']:
result &= member>operand
if operator in ['gte', '>=']:
result &= member>=operand
if operator in ['and', '&']:
result &= member&operand
if operator in ['or', '|']:
result &= member|operand
if operator in ['len']:
result &= len(member)==operand
# I can think of some more, but they're probably not useful.
return result
def test_or(self, member, logic):
"""Member is a value, logic is a set of values, ANY of which
can be True
"""
result = False
for test in logic:
result |= self.test_logic(member, test)
return result
def test_and(self, member, logic):
"""Member is a value, logic is a list of values, ALL of which
must be True
"""
result = True
for test in logic:
result &= self.test_logic(member, test)
return result
def test_dict(self, member, logic):
"""Member is a value, logic is a dict of other members to
compare to. All other member tests must be True
"""
result = True
for other_member, test in logic.items():
result &= self.test_logic(self.get_member(other_member), test)
return result
def execute(self, test):
"""Subject is an object, test is a dict of {member: test} pairs
        to perform on subject's members. Each key in test is a member
of subject.
"""
for member_name, logic in test.items():
result = self.test_logic(self.get_member(member_name), logic)
print('member %s is %s' % (member_name, result))
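# Illustrative, runnable sketch (not part of the original module): a tiny
# subject checked with the machinery above. The Tiny class and its values are
# made up for demonstration only.
if __name__ == '__main__':
    class Tiny(object):
        a = 5
        b = 'foo'
    # Prints "member a is True" and "member b is True" (dict order may vary).
    Logician(Tiny()).execute({
        'a': Logic_Operator({'gte': 1, 'lt': 10}),
        'b': Logic_OR(['foo', 'bar']),
    })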
# A couple of name aliases
class Validation(Logician):
pass
class Visibility(Logician):
pass<|fim▁end|> | TESTA = {
'a': 0,
'c': Logic_OR([ 'foo', 'bar' ]),
'd': Logic_AND([True, True]), |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | pub mod thing_type; |
<|file_name|>Change.java<|end_file_name|><|fim▁begin|>package com.nexusplay.containers;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.sql.SQLException;
import org.apache.commons.io.IOUtils;
import com.nexusplay.db.SubtitlesDatabase;
import com.nexusplay.security.RandomContainer;
/**
* Contains a proposed change (to a subtitle).
* @author alex
*
*/
public class Change {
private String targetID, changedContent, id, originalContent, votes;
private int nrVotes;
/**
* Constructor for creating new objects, prior to storing them in the database.
     * @param changedContent The change's new (replacement) data
     * @param originalContent The change's original data
     * @param targetID The object targeted by the change
     * @param votes The user IDs that voted for this change
*/
public Change(String changedContent, String originalContent, String targetID, String votes){
this.changedContent = changedContent;
this.originalContent = originalContent;
this.targetID = targetID;
this.votes = votes;
nrVotes = votes.length() - votes.replace(";", "").length();
generateId();
}
/**
* This constructor should only be used for recreating a stored object.
     * @param changedContent The change's new (replacement) data
     * @param originalContent The change's original data
     * @param targetID The object targeted by the change
     * @param votes The user IDs that voted for this change
* @param id The change's unique ID
*/
public Change(String changedContent, String originalContent, String targetID, String votes, String id){
this.changedContent = changedContent;
this.originalContent = originalContent;
this.targetID = targetID;
this.votes = votes;
nrVotes = votes.length() - votes.replace(";", "").length();
this.id = id;
}
/**
* Commits a change to disk.
* @throws SQLException Thrown if the database is not accessible to us for whatever reason
* @throws FileNotFoundException Thrown if we're denied access to the subtitle file
* @throws IOException Thrown if an error appears while writing the file
*/
public void commitChange() throws SQLException, FileNotFoundException, IOException{
Subtitle sub = SubtitlesDatabase.getSubtitleByID(targetID);
FileInputStream input = new FileInputStream(SettingsContainer.getAbsoluteSubtitlePath() + File.separator + sub.getId() + ".vtt");
String content = IOUtils.toString(input, "UTF-8");
content = content.replaceAll(originalContent, changedContent);
content = content.replaceAll(originalContent.replaceAll("\n", "\r\n"), changedContent.replaceAll("\n", "\r\n"));
FileOutputStream output = new FileOutputStream(SettingsContainer.getAbsoluteSubtitlePath() + File.separator + sub.getId() + ".vtt");<|fim▁hole|> IOUtils.write(content, output, "UTF-8");
output.close();
input.close();
}
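    // Illustrative sketch (not part of the original class): proposing and then
    // applying a change. The subtitle ID and vote string below are hypothetical.
    //
    //   Change fix = new Change("corrected cue text", "original cue text",
    //                           "someSubtitleId", "user1;user2;");
    //   fix.commitChange(); // rewrites the stored .vtt file for that subtitle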
/**
* Generates a new unique ID for the item
*/
public void generateId()
{
id = (new BigInteger(130, RandomContainer.getRandom())).toString(32);
}
/**
* @return The ID of the Media element associated to this object
*/
public String getTargetID() {
return targetID;
}
/**
* @param targetID The new ID of the Media element associated to this object
*/
public void setTargetID(String targetID) {
this.targetID = targetID;
}
/**
* @return The change itself
*/
public String getChangedContent() {
return changedContent;
}
/**
* @param content The new data to change
*/
public void setChangedContent(String content) {
this.changedContent = content;
}
/**
* @return The change's unique ID
*/
public String getId() {
return id;
}
/**
* @param id The change's new unique ID
*/
public void setId(String id) {
this.id = id;
}
/**
* @return The user IDs who voted for this change
*/
public String getVotes() {
return votes;
}
/**
* @param votes The new user IDs who voted for this change
*/
public void setVotes(String votes) {
this.votes = votes;
nrVotes = votes.length() - votes.replace(";", "").length();
}
/**
* @return The original content prior to changing
*/
public String getOriginalContent() {
return originalContent;
}
/**
* @param originalContent The new original content prior to changing
*/
public void setOriginalContent(String originalContent) {
this.originalContent = originalContent;
}
/**
* @return the nrVotes
*/
public int getNrVotes() {
return nrVotes;
}
/**
* @param nrVotes the nrVotes to set
*/
public void setNrVotes(int nrVotes) {
this.nrVotes = nrVotes;
}
}<|fim▁end|> | |
<|file_name|>client.rs<|end_file_name|><|fim▁begin|>#![deny(warnings)]
#![allow(non_snake_case)]
#[allow(unused_imports)]
use std::vec::Vec;
use object;
use rest::url;
use super::connector::Connector;
pub struct GoodDataClient {
pub connector: Connector,
pub token: Option<String>,
pub environment: Option<String>,
pub driver: Option<String>,
pub user: Option<object::AccountSetting>,
pub projects: Option<Vec<object::Project>>,
}
impl Drop for GoodDataClient {
fn drop(&mut self) {
self.disconnect();
}
}
#[allow(dead_code)]
#[allow(unused_variables)]
#[allow(unreachable_code)]
impl GoodDataClient {
/// Create Instance of GoodData Client
pub fn new(connector: Connector,
token: Option<String>,
environment: Option<String>,
driver: Option<String>)
-> GoodDataClient {
GoodDataClient {
connector: connector,
token: token,
environment: environment,
driver: driver,
user: None,
projects: None,
}
}
/// Get Connector
pub fn connector(&self) -> &Connector {
&self.connector
}
/// Get Projects
pub fn projects(&self) -> &Option<Vec<object::Project>> {
// self.projects_fetch();
&self.projects
}
/// Get user
pub fn user(&self) -> &Option<object::AccountSetting> {
&self.user
}
pub fn projects_fetch_if_none(&mut self) -> &Vec<object::Project> {
match self.projects {
Some(ref projects) => projects,
None => {
self.projects_fetch();
self.projects().as_ref().unwrap()
}
}
}
pub fn create_project(&mut self, project_create: object::ProjectCreate) {
let project =
self.user.as_ref().unwrap().project_create(&mut self.connector, project_create);
match project {
Some(p) => self.projects.as_mut().unwrap().push(p),
None => {}
}<|fim▁hole|>
pub fn delete_project(&mut self, project_delete: object::Project) {
let res = self.user.as_ref().unwrap().project_delete(&mut self.connector, project_delete);
}
pub fn projects_fetch(&mut self) {
let projects = self.user.as_ref().unwrap().projects(&mut self.connector);
self.projects = match projects {
Some(p) => Some(p.projects),
None => None,
}
}
pub fn report_csv(&mut self, report_definition: String) -> String {
let payload = object::ReportReq {
report_req: object::ReportReqBody { reportDefinition: report_definition },
};
let uri = self.connector
.object_by_post::<object::ReportReq, object::Uri>(url::PROJECT_EXECUTE_RAW.to_string(),
payload);
let mut result = self.connector.get(uri.unwrap().uri);
self.connector.get_content(&mut result)
}
/// Login to GoodData platform
pub fn connect<S: Into<String>>(&mut self, username: S, password: S) {
let payload = object::PostUserLogin {
postUserLogin: object::PostUserLoginBody {
login: Some(username.into()),
password: Some(password.into()),
remember: Some("0".into()),
},
};
let user_login = self.connector
.object_by_post::<object::PostUserLogin, object::UserLogin>(url::LOGIN.to_string(),
payload);
let profile_link = user_login.unwrap().userLogin.profile;
self.connector.refresh_token();
let user = self.connector.object_by_get::<object::AccountSetting>(profile_link).unwrap();
self.user = Some(user);
// let csv = self.report_csv("/gdc/md/GoodSalesDemo/obj/30834".to_string());
// debug!("CSV: {}", csv);
}
pub fn disconnect(&mut self) {
info!("GoodDataClient::disconnect() - Disconnecting from GoodData Platform");
self.user = None;
self.projects = None;
}
}<|fim▁end|> | } |
<|file_name|>vwl.js<|end_file_name|><|fim▁begin|>(function(global) {
var vwl = {};
var receivePoster;
var receiveEntry;
var receiveLoadedList;
// vwl.init - advertise VWL info and register for VWL messages
//
// Parameters:
// left - (optional) url of this world's initial left entry image
// right - (optional) url of this world's initial right entry image
// receivePosterFunc - (optional) function to handle poster images from other
// worlds
// receiveEntryFunc - (optional) function to handle entry images from other
// worlds
  //   receiveLoadedListFunc - (optional) function to handle list of loaded worlds
vwl.init = function(left, right,
receivePosterFunc,
receiveEntryFunc,
receiveLoadedListFunc) {
receivePoster = receivePosterFunc;
receiveEntry = receiveEntryFunc;
receiveLoadedList = receiveLoadedListFunc;
receiveEntry && window.addEventListener('message', function(message) {
if (message.source != window || message.origin != window.location.origin)
return;
if (message.data.tabInfo) {
var left = null;
var right = null;
if (message.data.tabInfo.info && message.data.tabInfo.info.entry_image) {
left = message.data.tabInfo.info.entry_image.left_src;
right = message.data.tabInfo.info.entry_image.right_src;
}
receiveEntry(message.data.tabInfo.url, message.data.tabInfo.loaded,
left, right);
}
if (message.data.loadedList !== undefined) {
receiveLoadedList(message.data.loadedList);
}
<|fim▁hole|> left_src:left, right_src:right}}}, '*');
}
// vwl.getInfo - get info (entry image and poster image) on a specific world
//
// Parameters:
// url - url of worlds to get info on
// getPoster - (optional) if true get the poster image
vwl.getInfo = function(url, getPoster) {
if (receivePoster && getPoster) {
var request = new XMLHttpRequest();
var dir = url.substr(0, url.lastIndexOf('/') + 1);
request.open('GET', dir + 'vwl_info.json');
request.onreadystatechange = function() {
if (request.readyState == 4 && request.status == 200) {
var poster = JSON.parse(request.responseText).poster_image;
receivePoster(url,
poster.left_src ? dir + poster.left_src : null,
poster.right_src ? dir + poster.right_src : null,
poster._2d_src ? dir + poster._2d_src : null);
}
else {
receivePoster(url);
}
}
request.send(null);
}
receiveEntry && window.postMessage({getInfo:url}, '*');
}
// vwl.getLoadedList - get the list of loaded worlds
vwl.getLoadedList = function() {
window.postMessage({getLoadedList:true}, '*');
}
// vwl.open - load world
//
// Parameters:
// url - url of world to open
vwl.open = function(url) {
window.postMessage({open:url}, '*');
}
// vwl.navigate - navigate to a world
//
// Parameters:
// left - (optional) new left entry image for current world
// right - (optional) new right entry image for current world
// url - url of world to navigate to
vwl.navigate = function(left, right, url) {
var message = {navigate:url};
if (left && right) {
message.info = {entry_image:{left_src:left, right_src:right}};
}
window.postMessage(message, '*');
}
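  // Illustrative usage sketch (not part of the original source). The function
  // below is never called; the image paths and world URL are placeholders.
  function exampleUsage() {
    vwl.init('img/entry_left.png', 'img/entry_right.png',
             function(url, left, right, flat) { /* show poster images */ },
             function(url, loaded, left, right) { /* show entry images */ },
             function(loadedList) { /* show list of loaded worlds */ });
    vwl.getInfo('http://example.com/world/index.html', true);
    vwl.getLoadedList();
    vwl.navigate(null, null, 'http://example.com/world/index.html');
  }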
global.vwl = vwl;
}) (window);<|fim▁end|> | }, false);
window.postMessage({info:{entry_image:{ |
<|file_name|>forknotify.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -alertnotify option."""
import os
import time
from test_framework.test_framework import BitcoinTestFramework
class ForkNotifyTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
def setup_network(self):
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
with open(self.alert_filename, 'w', encoding='utf8'):
pass # Just open then close to create zero-length file
self.extra_args = [["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""],
["-blockversion=211"]]
super().setup_network()
def run_test(self):
# Mine 51 up-version blocks
self.nodes[1].generate(51)
self.sync_all()
# -alertnotify should trigger on the 51'st,
# but mine and sync another to give
# -alertnotify time to write
self.nodes[1].generate(1)
self.sync_all()
# Give bitcoind 10 seconds to write the alert notification<|fim▁hole|> break
time.sleep(0.1)
timeout -= 0.1
else:
assert False, "-alertnotify did not warn of up-version blocks"
with open(self.alert_filename, 'r', encoding='utf8') as f:
alert_text = f.read()
# Mine more up-version blocks, should not get more alerts:
self.nodes[1].generate(1)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
with open(self.alert_filename, 'r', encoding='utf8') as f:
alert_text2 = f.read()
if alert_text != alert_text2:
raise AssertionError("-alertnotify excessive warning of up-version blocks")
if __name__ == '__main__':
ForkNotifyTest().main()<|fim▁end|> | timeout = 10.0
while timeout > 0:
if os.path.exists(self.alert_filename) and os.path.getsize(self.alert_filename): |
<|file_name|>HBaseTableInputFormat.java<|end_file_name|><|fim▁begin|>/* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.hbase;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import java.util.ListIterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableRecordReader;
import org.apache.hadoop.hbase.mapreduce.TableSplit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.InputSplit;
public class HBaseTableInputFormat extends TableInputFormat {
private static final Log LOG = LogFactory.getLog(HBaseTableInputFormat.class);
protected final byte[] gt_;
protected final byte[] gte_;
protected final byte[] lt_;
protected final byte[] lte_;
public HBaseTableInputFormat() {
this(-1, null, null, null, null);
}
protected HBaseTableInputFormat(long limit, byte[] gt, byte[] gte, byte[] lt, byte[] lte) {
super();
setTableRecordReader(new HBaseTableRecordReader(limit));
gt_ = gt;
gte_ = gte;
lt_ = lt;
lte_ = lte;
}
public static class HBaseTableIFBuilder {
protected byte[] gt_;
protected byte[] gte_;
protected byte[] lt_;
protected byte[] lte_;
protected long limit_;
protected Configuration conf_;
public HBaseTableIFBuilder withGt(byte[] gt) { gt_ = gt; return this; }
public HBaseTableIFBuilder withGte(byte[] gte) { gte_ = gte; return this; }
public HBaseTableIFBuilder withLt(byte[] lt) { lt_ = lt; return this; }
public HBaseTableIFBuilder withLte(byte[] lte) { lte_ = lte; return this; }<|fim▁hole|> public HBaseTableInputFormat build() {
HBaseTableInputFormat inputFormat = new HBaseTableInputFormat(limit_, gt_, gte_, lt_, lte_);
if (conf_ != null) inputFormat.setConf(conf_);
return inputFormat;
}
}
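  // Illustrative sketch (not part of the original class): assembling the input
  // format through the builder above. The row-key bound and limit are made-up
  // values; only the Configuration comes from the caller.
  @SuppressWarnings("unused")
  private static HBaseTableInputFormat exampleBuilderUsage(Configuration conf) {
    return new HBaseTableIFBuilder()
        .withGt(Bytes.toBytes("row-0100")) // hypothetical lower bound (exclusive)
        .withLimit(1000L)                  // hypothetical per-reader row limit
        .withConf(conf)
        .build();
  }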
@Override
public List<InputSplit> getSplits(org.apache.hadoop.mapreduce.JobContext context)
throws IOException {
List<InputSplit> splits = super.getSplits(context);
ListIterator<InputSplit> splitIter = splits.listIterator();
while (splitIter.hasNext()) {
TableSplit split = (TableSplit) splitIter.next();
byte[] startKey = split.getStartRow();
byte[] endKey = split.getEndRow();
// Skip if the region doesn't satisfy configured options.
if ((skipRegion(CompareOp.LESS, startKey, lt_)) ||
(skipRegion(CompareOp.GREATER, endKey, gt_)) ||
(skipRegion(CompareOp.GREATER, endKey, gte_)) ||
(skipRegion(CompareOp.LESS_OR_EQUAL, startKey, lte_)) ) {
splitIter.remove();
}
}
return splits;
}
private boolean skipRegion(CompareOp op, byte[] key, byte[] option ) throws IOException {
if (key.length == 0 || option == null)
return false;
BinaryComparator comp = new BinaryComparator(option);
RowFilter rowFilter = new RowFilter(op, comp);
return rowFilter.filterRowKey(key, 0, key.length);
}
protected class HBaseTableRecordReader extends TableRecordReader {
private long recordsSeen = 0;
private final long limit_;
private byte[] startRow_;
private byte[] endRow_;
private transient byte[] currRow_;
private int maxRowLength;
private BigInteger bigStart_;
private BigInteger bigEnd_;
private BigDecimal bigRange_;
private transient float progressSoFar_ = 0;
public HBaseTableRecordReader(long limit) {
limit_ = limit;
}
@Override
public void setScan(Scan scan) {
super.setScan(scan);
startRow_ = scan.getStartRow();
endRow_ = scan.getStopRow();
byte[] startPadded;
byte[] endPadded;
if (startRow_.length < endRow_.length) {
startPadded = Bytes.padTail(startRow_, endRow_.length - startRow_.length);
endPadded = endRow_;
} else if (endRow_.length < startRow_.length) {
startPadded = startRow_;
endPadded = Bytes.padTail(endRow_, startRow_.length - endRow_.length);
} else {
startPadded = startRow_;
endPadded = endRow_;
}
currRow_ = startRow_;
byte [] prependHeader = {1, 0};
bigStart_ = new BigInteger(Bytes.add(prependHeader, startPadded));
bigEnd_ = new BigInteger(Bytes.add(prependHeader, endPadded));
bigRange_ = new BigDecimal(bigEnd_.subtract(bigStart_));
maxRowLength = endRow_.length > startRow_.length ? endRow_.length : startRow_.length;
LOG.info("setScan with ranges: " + bigStart_ + " - " + bigEnd_ + " ( " + bigRange_ + ")");
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
if (limit_ > 0 && ++recordsSeen > limit_) {
return false;
}
boolean hasMore = super.nextKeyValue();
if (hasMore) {
currRow_ = getCurrentKey().get();
}
return hasMore;
}
@Override
public float getProgress() {
if (currRow_ == null || currRow_.length == 0 || endRow_.length == 0 || endRow_ == HConstants.LAST_ROW) {
return 0;
}
byte[] lastPadded = currRow_;
if(maxRowLength > currRow_.length) {
lastPadded = Bytes.padTail(currRow_, maxRowLength - currRow_.length);
}
byte [] prependHeader = {1, 0};
BigInteger bigLastRow = new BigInteger(Bytes.add(prependHeader, lastPadded));
if (bigLastRow.compareTo(bigEnd_) > 0) {
return progressSoFar_;
}
BigDecimal processed = new BigDecimal(bigLastRow.subtract(bigStart_));
try {
BigDecimal progress = processed.setScale(3).divide(bigRange_, BigDecimal.ROUND_HALF_DOWN);
progressSoFar_ = progress.floatValue();
return progressSoFar_;
} catch (java.lang.ArithmeticException e) {
return 0;
}
}
}
}<|fim▁end|> | public HBaseTableIFBuilder withLimit(long limit) { limit_ = limit; return this; }
public HBaseTableIFBuilder withConf(Configuration conf) { conf_ = conf; return this; }
|
<|file_name|>LoadTestRESTR.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
#
# IM - Infrastructure Manager
# Copyright (C) 2011 - GRyCAP - Universitat Politecnica de Valencia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from multiprocessing import Process
import unittest
import time
import sys
import os
import random
import datetime
import requests
import json
sys.path.append("..")
sys.path.append(".")
RADL_ADD = "network publica\nnetwork privada\nsystem wn\ndeploy wn 1"
TESTS_PATH = os.path.dirname(os.path.realpath(__file__))
RADL_FILE = TESTS_PATH + '/load-test.radl'
AUTH_FILE = TESTS_PATH + '/auth.dat'
HOSTNAME = "imservice"
TEST_PORT = 8800
MIN_SLEEP = 0
MAX_SLEEP = 5
class LoadTest(unittest.TestCase):
server = None
auth_data = None<|fim▁hole|> def setUpClass(cls):
cls.auth_data = open(AUTH_FILE, 'r').read().replace("\n", "\\n")
cls.inf_id = 0
@classmethod
def tearDownClass(cls):
# Assure that the infrastructure is destroyed
try:
headers = {'AUTHORIZATION': cls.auth_data}
url = "http://%s:%d%s" % (HOSTNAME, TEST_PORT, "/infrastructures/" + cls.inf_id)
requests.request("DELETE", url, headers=headers)
except Exception:
pass
@staticmethod
def wait():
mint = MIN_SLEEP
maxt = MAX_SLEEP
delay = random.uniform(mint, maxt)
time.sleep(delay)
def create_request(self, method, path, headers=None, body=None):
before = time.time()
if headers is None:
headers = {'AUTHORIZATION': self.auth_data}
elif headers != {}:
if 'AUTHORIZATION' not in headers:
headers['AUTHORIZATION'] = self.auth_data
url = "http://%s:%d%s" % (HOSTNAME, TEST_PORT, path)
resp = requests.request(method, url, headers=headers, data=body)
resp_time = time.time() - before
self.__class__.response_times.append(resp_time)
return resp
def test_10_list(self):
resp = self.create_request("GET", "/infrastructures")
self.assertEqual(resp.status_code, 200,
msg="ERROR listing user infrastructures:" + resp.text)
if resp.text:
for inf_id in resp.text.split("\n"):
inf_id = os.path.basename(inf_id)
self.getinfo(inf_id)
self.getstate(inf_id)
self.print_response_times()
def getinfo(self, inf_id):
resp = self.create_request("GET", "/infrastructures/" + inf_id)
self.assertEqual(resp.status_code, 200,
msg="ERROR getting the infrastructure info:" + resp.text)
def getstate(self, inf_id):
resp = self.create_request("GET", "/infrastructures/" + inf_id + "/state")
self.assertEqual(
resp.status_code, 200, msg="ERROR getting the infrastructure state:" + resp.text)
res = json.loads(resp.text)
state = res['state']['state']
vm_states = res['state']['vm_states']
def print_response_times(self):
total = 0.0
for time in self.response_times:
total += time
print("Mean Time: %.4f" % (total / len(self.response_times)))
def test(num_client):
now = datetime.datetime.now()
print(now, ": Launch client num: %d" % num_client)
unittest.main()
now = datetime.datetime.now()
print(now, ": End client num: %d" % num_client)
if __name__ == '__main__':
MAX_THREADS = 1
MAX_CLIENTS = 1
DELAY = 1
if len(sys.argv) > 4:
MAX_SLEEP = float(sys.argv[4])
del sys.argv[4]
if len(sys.argv) > 3:
DELAY = float(sys.argv[3])
del sys.argv[3]
if len(sys.argv) > 2:
MAX_CLIENTS = int(sys.argv[1])
MAX_THREADS = int(sys.argv[2])
del sys.argv[1]
del sys.argv[1]
elif len(sys.argv) > 1:
MAX_CLIENTS = MAX_THREADS = int(sys.argv[1])
del sys.argv[1]
processes = []
remaining = MAX_CLIENTS
while remaining > 0:
now = datetime.datetime.now()
while len(processes) < MAX_THREADS:
p = Process(target=test, args=(MAX_CLIENTS - remaining,))
p.start()
processes.append(p)
remaining -= 1
while len(processes) >= MAX_THREADS:
new_processes = []
for p in processes:
if p.is_alive():
new_processes.append(p)
processes = new_processes
if len(processes) >= MAX_THREADS:
time.sleep(DELAY)<|fim▁end|> | inf_id = 0
response_times = []
@classmethod |
<|file_name|>oauth_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2014 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import json
import re
from six.moves import urllib
import httmock
import requests
import six
from girder.constants import SettingKey
from tests import base
def setUpModule():
base.enabledPlugins.append('oauth')
base.startServer()
def tearDownModule():
base.stopServer()
class OauthTest(base.TestCase):
def setUp(self):
base.TestCase.setUp(self)
# girder.plugins is not available until setUp is running
global PluginSettings
from girder.plugins.oauth.constants import PluginSettings
self.adminUser = self.model('user').createUser(
email='[email protected]',
login='admin',
firstName='first',
lastName='last',
password='password',
admin=True
)
# Specifies which test account (typically "new" or "existing") a
# redirect to a provider will simulate authentication for
self.accountType = None
def testDeriveLogin(self):
"""
Unit tests the _deriveLogin method of the provider classes.
"""
from girder.plugins.oauth.providers.base import ProviderBase
login = ProviderBase._deriveLogin('[email protected]', 'John', 'Doe')
self.assertEqual(login, 'johndoe')
login = ProviderBase._deriveLogin('[email protected]', 'A', 'B')
self.assertEqual(login, 'helloworldfoo')
login = ProviderBase._deriveLogin('[email protected]', 'A', 'B', 'user2')
self.assertEqual(login, 'user2')
login = ProviderBase._deriveLogin('[email protected]', 'A', 'B', 'admin')
self.assertEqual(login, 'admin1')
def _testOauth(self, providerInfo):
# Close registration to start off, and simulate a new user
self.model('setting').set(SettingKey.REGISTRATION_POLICY, 'closed')
self.accountType = 'new'
# We should get an empty listing when no providers are set up
params = {
'key': PluginSettings.PROVIDERS_ENABLED,
'value': []
}
resp = self.request(
'/system/setting', user=self.adminUser, method='PUT', params=params)
self.assertStatusOk(resp)
resp = self.request('/oauth/provider', exception=True, params={
'redirect': 'http://localhost/#foo/bar',
'list': True
})
self.assertStatusOk(resp)
self.assertFalse(resp.json)
# Turn on provider, but don't set other settings
params = {
'list': json.dumps([{
'key': PluginSettings.PROVIDERS_ENABLED,
'value': [providerInfo['id']]
}])
}
resp = self.request(
'/system/setting', user=self.adminUser, method='PUT', params=params)
self.assertStatusOk(resp)
resp = self.request('/oauth/provider', exception=True, params={
'redirect': 'http://localhost/#foo/bar'})
self.assertStatus(resp, 500)
# Set up provider normally
params = {
'list': json.dumps([
{
'key': PluginSettings.PROVIDERS_ENABLED,
'value': [providerInfo['id']]
}, {
'key': providerInfo['client_id']['key'],
'value': providerInfo['client_id']['value']
}, {
'key': providerInfo['client_secret']['key'],
'value': providerInfo['client_secret']['value']
}
])
}
resp = self.request(
'/system/setting', user=self.adminUser, method='PUT',
params=params)
self.assertStatusOk(resp)
# No need to re-fetch and test all of these settings values; they will
# be implicitly tested later
# Make sure that if no list param is passed, we receive the old format
resp = self.request('/oauth/provider', params={
'redirect': 'http://localhost/#foo/bar'
})
self.assertStatusOk(resp)
self.assertIsInstance(resp.json, dict)
self.assertEqual(len(resp.json), 1)
self.assertIn(providerInfo['name'], resp.json)
self.assertRegexpMatches(
resp.json[providerInfo['name']],
providerInfo['url_re'])
# This will need to be called several times, to get fresh tokens
def getProviderResp():
resp = self.request('/oauth/provider', params={
'redirect': 'http://localhost/#foo/bar',
'list': True
})
self.assertStatusOk(resp)
self.assertIsInstance(resp.json, list)
self.assertEqual(len(resp.json), 1)
providerResp = resp.json[0]
self.assertSetEqual(
set(six.viewkeys(providerResp)),
{'id', 'name', 'url'})
self.assertEqual(providerResp['id'], providerInfo['id'])
self.assertEqual(providerResp['name'], providerInfo['name'])
self.assertRegexpMatches(
providerResp['url'],
providerInfo['url_re'])
redirectParams = urllib.parse.parse_qs(
urllib.parse.urlparse(providerResp['url']).query)
csrfTokenParts = redirectParams['state'][0].partition('.')
token = self.model('token').load(
csrfTokenParts[0], force=True, objectId=False)
self.assertLess(
token['expires'],
datetime.datetime.utcnow() + datetime.timedelta(days=0.30))
self.assertEqual(
csrfTokenParts[2],
'http://localhost/#foo/bar')
return providerResp
# Try the new format listing
getProviderResp()
        # Try callback, for a non-existent provider
resp = self.request('/oauth/foobar/callback')
self.assertStatus(resp, 400)
# Try callback, without providing any params
resp = self.request('/oauth/%s/callback' % providerInfo['id'])
self.assertStatus(resp, 400)
# Try callback, providing params as though the provider failed
resp = self.request(
'/oauth/%s/callback' % providerInfo['id'],
params={
'code': None,
'error': 'some_custom_error',
}, exception=True)
self.assertStatus(resp, 502)
self.assertEqual(
resp.json['message'],
"Provider returned error: 'some_custom_error'.")
# This will need to be called several times, to use fresh tokens
def getCallbackParams(providerResp):
resp = requests.get(providerResp['url'], allow_redirects=False)
self.assertEqual(resp.status_code, 302)
callbackLoc = urllib.parse.urlparse(resp.headers['location'])
self.assertEqual(
callbackLoc.path,
r'/api/v1/oauth/%s/callback' % providerInfo['id'])
callbackLocQuery = urllib.parse.parse_qs(callbackLoc.query)
self.assertNotHasKeys(callbackLocQuery, ('error',))
callbackParams = {
key: val[0] for key, val in six.viewitems(callbackLocQuery)
}
return callbackParams
# Call (simulated) external provider
getCallbackParams(getProviderResp())
# Try callback, with incorrect CSRF token
params = getCallbackParams(getProviderResp())
params['state'] = 'something_wrong'
resp = self.request('/oauth/%s/callback' % providerInfo['id'],
params=params)
self.assertStatus(resp, 403)
self.assertTrue(
resp.json['message'].startswith('Invalid CSRF token'))
# Try callback, with expired CSRF token
params = getCallbackParams(getProviderResp())
token = self.model('token').load(
params['state'].partition('.')[0], force=True, objectId=False)
token['expires'] -= datetime.timedelta(days=1)
self.model('token').save(token)
resp = self.request('/oauth/%s/callback' % providerInfo['id'],
params=params)
self.assertStatus(resp, 403)
self.assertTrue(
resp.json['message'].startswith('Expired CSRF token'))
# Try callback, with a valid CSRF token but no redirect
params = getCallbackParams(getProviderResp())
params['state'] = params['state'].partition('.')[0]
resp = self.request('/oauth/%s/callback' % providerInfo['id'],
params=params)
self.assertStatus(resp, 400)
self.assertTrue(
resp.json['message'].startswith('No redirect location'))
# Try callback, with incorrect code
params = getCallbackParams(getProviderResp())
params['code'] = 'something_wrong'
resp = self.request('/oauth/%s/callback' % providerInfo['id'],
params=params)
self.assertStatus(resp, 502)
# Try callback, with real parameters from provider, but still for the
# 'new' account
params = getCallbackParams(getProviderResp())
resp = self.request('/oauth/%s/callback' % providerInfo['id'],
params=params)
self.assertStatus(resp, 400)
self.assertTrue(
resp.json['message'].startswith(
'Registration on this instance is closed.'))
# This will need to be called several times, and will do a normal login
def doOauthLogin(accountType):
self.accountType = accountType
params = getCallbackParams(getProviderResp())
resp = self.request('/oauth/%s/callback' % providerInfo['id'],
params=params, isJson=False)
self.assertStatus(resp, 303)
self.assertEqual(resp.headers['Location'],
'http://localhost/#foo/bar')
self.assertTrue('girderToken' in resp.cookie)
resp = self.request('/user/me',
token=resp.cookie['girderToken'].value)
self.assertStatusOk(resp)
self.assertEqual(resp.json['email'],
providerInfo['accounts'][accountType]['user']['email'])
self.assertEqual(resp.json['login'],
providerInfo['accounts'][accountType]['user']['login'])
self.assertEqual(resp.json['firstName'],
providerInfo['accounts'][accountType]['user']['firstName'])
self.assertEqual(resp.json['lastName'],<|fim▁hole|>
# Try callback for the 'new' account, with open registration
self.model('setting').set(SettingKey.REGISTRATION_POLICY, 'open')
doOauthLogin('new')
# Password login for 'new' OAuth-only user should fail gracefully
newUser = providerInfo['accounts']['new']['user']
resp = self.request('/user/authentication',
basicAuth='%s:mypasswd' % newUser['login'])
self.assertStatus(resp, 400)
self.assertTrue(
resp.json['message'].startswith('You don\'t have a password.'))
# Reset password for 'new' OAuth-only user should work
self.assertTrue(base.mockSmtp.isMailQueueEmpty())
resp = self.request('/user/password/temporary',
method='PUT', params={
'email': providerInfo['accounts']['new']['user']['email']})
self.assertStatusOk(resp)
self.assertEqual(resp.json['message'], 'Sent temporary access email.')
self.assertTrue(base.mockSmtp.waitForMail())
msg = base.mockSmtp.getMail()
# Pull out the auto-generated token from the email
search = re.search('<a href="(.*)">', msg)
link = search.group(1)
linkParts = link.split('/')
userId = linkParts[-3]
tokenId = linkParts[-1]
tempToken = self.model('token').load(
tokenId, force=True, objectId=False)
resp = self.request('/user/password/temporary/' + userId,
method='GET', params={
'token': tokenId})
self.assertStatusOk(resp)
self.assertEqual(resp.json['user']['login'], newUser['login'])
# We should now be able to change the password
resp = self.request('/user/password',
method='PUT', user=resp.json['user'], params={
'old': tokenId,
'new': 'mypasswd'})
self.assertStatusOk(resp)
# The temp token should get deleted on password change
token = self.model('token').load(tempToken, force=True, objectId=False)
self.assertEqual(token, None)
# Password login for 'new' OAuth-only user should now succeed
resp = self.request('/user/authentication',
basicAuth='%s:mypasswd' % newUser['login'])
self.assertStatusOk(resp)
@httmock.all_requests
def mockOtherRequest(self, url, request):
raise Exception('Unexpected url %s' % str(request.url))
def testGoogleOauth(self):
providerInfo = {
'id': 'google',
'name': 'Google',
'client_id': {
'key': PluginSettings.GOOGLE_CLIENT_ID,
'value': 'google_test_client_id'
},
'client_secret': {
'key': PluginSettings.GOOGLE_CLIENT_SECRET,
'value': 'google_test_client_secret'
},
'allowed_callback_re':
r'^http://127\.0\.0\.1(?::\d+)?/api/v1/oauth/google/callback$',
'url_re': r'^https://accounts\.google\.com/o/oauth2/auth',
'accounts': {
'existing': {
'auth_code': 'google_existing_auth_code',
'access_token': 'google_existing_test_token',
'user': {
'login': self.adminUser['login'],
'email': self.adminUser['email'],
'firstName': self.adminUser['firstName'],
'lastName': self.adminUser['lastName'],
'oauth': {
'provider': 'google',
'id': '5326'
}
}
},
'new': {
'auth_code': 'google_new_auth_code',
'access_token': 'google_new_test_token',
'user': {
# this login will be created internally by _deriveLogin
'login': 'googleuser',
'email': '[email protected]',
'firstName': 'John',
'lastName': 'Doe',
'oauth': {
'provider': 'google',
'id': '9876'
}
}
}
}
}
@httmock.urlmatch(scheme='https', netloc='^accounts.google.com$',
path='^/o/oauth2/auth$', method='GET')
def mockGoogleRedirect(url, request):
try:
params = urllib.parse.parse_qs(url.query)
self.assertEqual(
params['response_type'],
['code'])
self.assertEqual(
params['access_type'],
['online'])
self.assertEqual(
params['scope'],
['profile email'])
except (KeyError, AssertionError) as e:
return {
'status_code': 400,
'content': json.dumps({
'error': repr(e)
})
}
try:
self.assertEqual(
params['client_id'],
[providerInfo['client_id']['value']])
except (KeyError, AssertionError) as e:
return {
'status_code': 401,
'content': json.dumps({
'error': repr(e)
})
}
try:
self.assertRegexpMatches(
params['redirect_uri'][0],
providerInfo['allowed_callback_re'])
state = params['state'][0]
# Nothing to test for state, since provider doesn't care
except (KeyError, AssertionError) as e:
return {
'status_code': 400,
'content': json.dumps({
'error': repr(e)
})
}
returnQuery = urllib.parse.urlencode({
'state': state,
'code': providerInfo['accounts'][self.accountType]['auth_code']
})
return {
'status_code': 302,
'headers': {
'Location': '%s?%s' % (params['redirect_uri'][0],
returnQuery)
}
}
@httmock.urlmatch(scheme='https', netloc='^accounts.google.com$',
path='^/o/oauth2/token$', method='POST')
def mockGoogleToken(url, request):
try:
params = urllib.parse.parse_qs(request.body)
self.assertEqual(
params['client_id'],
[providerInfo['client_id']['value']])
except (KeyError, AssertionError) as e:
return {
'status_code': 401,
'content': json.dumps({
'error': repr(e)
})
}
try:
self.assertEqual(
params['grant_type'],
['authorization_code'])
self.assertEqual(
params['client_secret'],
[providerInfo['client_secret']['value']])
self.assertRegexpMatches(
params['redirect_uri'][0],
providerInfo['allowed_callback_re'])
for account in six.viewvalues(providerInfo['accounts']):
if account['auth_code'] == params['code'][0]:
break
else:
self.fail()
except (KeyError, AssertionError) as e:
return {
'status_code': 400,
'content': json.dumps({
'error': repr(e)
})
}
return json.dumps({
'token_type': 'Bearer',
'access_token': account['access_token'],
'expires_in': 3546,
'id_token': 'google_id_token'
})
@httmock.urlmatch(scheme='https', netloc='^www.googleapis.com$',
path='^/plus/v1/people/me$', method='GET')
def mockGoogleApi(url, request):
try:
for account in six.viewvalues(providerInfo['accounts']):
if 'Bearer %s' % account['access_token'] == \
request.headers['Authorization']:
break
else:
self.fail()
params = urllib.parse.parse_qs(url.query)
self.assertSetEqual(
set(params['fields'][0].split(',')),
{'id', 'emails', 'name'})
except AssertionError as e:
return {
'status_code': 401,
'content': json.dumps({
'error': repr(e)
})
}
return json.dumps({
'id': account['user']['oauth']['id'],
'name': {
'givenName': account['user']['firstName'],
'familyName': account['user']['lastName']
},
'emails': [
{
'type': 'other',
'value': '[email protected]'
}, {
'type': 'account',
'value': account['user']['email']
}
]
})
with httmock.HTTMock(
mockGoogleRedirect,
mockGoogleToken,
mockGoogleApi,
# Must keep "mockOtherRequest" last
self.mockOtherRequest
):
self._testOauth(providerInfo)
def testGithubOauth(self):
providerInfo = {
'id': 'github',
'name': 'GitHub',
'client_id': {
'key': PluginSettings.GITHUB_CLIENT_ID,
'value': 'github_test_client_id'
},
'client_secret': {
'key': PluginSettings.GITHUB_CLIENT_SECRET,
'value': 'github_test_client_secret'
},
'allowed_callback_re':
r'^http://127\.0\.0\.1(?::\d+)?/api/v1/oauth/github/callback$',
'url_re': r'^https://github\.com/login/oauth/authorize',
'accounts': {
'existing': {
'auth_code': 'github_existing_auth_code',
'access_token': 'github_existing_test_token',
'user': {
'login': self.adminUser['login'],
'email': self.adminUser['email'],
'firstName': self.adminUser['firstName'],
'lastName': self.adminUser['lastName'],
'oauth': {
'provider': 'github',
'id': '2399'
}
}
},
'new': {
'auth_code': 'github_new_auth_code',
'access_token': 'github_new_test_token',
'user': {
# login may be provided externally by GitHub; for
# simplicity here, do not use a username with whitespace
# or underscores
'login': 'jane83',
'email': '[email protected]',
'firstName': 'Jane',
'lastName': 'Doe',
'oauth': {
'provider': 'github',
'id': 1234
}
}
}
}
}
@httmock.urlmatch(scheme='https', netloc='^github.com$',
path='^/login/oauth/authorize$', method='GET')
def mockGithubRedirect(url, request):
redirectUri = None
try:
params = urllib.parse.parse_qs(url.query)
# Check redirect_uri first, so other errors can still redirect
redirectUri = params['redirect_uri'][0]
self.assertEqual(
params['client_id'],
[providerInfo['client_id']['value']])
except (KeyError, AssertionError) as e:
return {
'status_code': 404,
'content': json.dumps({
'error': repr(e)
})
}
try:
self.assertRegexpMatches(
redirectUri,
providerInfo['allowed_callback_re'])
state = params['state'][0]
# Nothing to test for state, since provider doesn't care
self.assertEqual(
params['scope'],
['user:email'])
except (KeyError, AssertionError) as e:
returnQuery = urllib.parse.urlencode({
'error': repr(e),
})
else:
returnQuery = urllib.parse.urlencode({
'state': state,
'code': providerInfo['accounts'][self.accountType]['auth_code']
})
return {
'status_code': 302,
'headers': {
'Location': '%s?%s' % (redirectUri, returnQuery)
}
}
@httmock.urlmatch(scheme='https', netloc='^github.com$',
path='^/login/oauth/access_token$', method='POST')
def mockGithubToken(url, request):
try:
self.assertEqual(request.headers['Accept'], 'application/json')
params = urllib.parse.parse_qs(request.body)
self.assertEqual(
params['client_id'],
[providerInfo['client_id']['value']])
except (KeyError, AssertionError) as e:
return {
'status_code': 404,
'content': json.dumps({
'error': repr(e)
})
}
try:
for account in six.viewvalues(providerInfo['accounts']):
if account['auth_code'] == params['code'][0]:
break
else:
self.fail()
self.assertEqual(
params['client_secret'],
[providerInfo['client_secret']['value']])
self.assertRegexpMatches(
params['redirect_uri'][0],
providerInfo['allowed_callback_re'])
except (KeyError, AssertionError) as e:
returnBody = json.dumps({
'error': repr(e),
'error_description': repr(e)
})
else:
returnBody = json.dumps({
'token_type': 'bearer',
'access_token': account['access_token'],
'scope': 'user:email'
})
return {
'status_code': 200,
'headers': {
'Content-Type': 'application/json'
},
'content': returnBody
}
@httmock.urlmatch(scheme='https', netloc='^api.github.com$',
path='^/user$', method='GET')
def mockGithubApiUser(url, request):
try:
for account in six.viewvalues(providerInfo['accounts']):
if 'token %s' % account['access_token'] == \
request.headers['Authorization']:
break
else:
self.fail()
except AssertionError as e:
return {
'status_code': 401,
'content': json.dumps({
'message': repr(e)
})
}
return json.dumps({
'id': account['user']['oauth']['id'],
'login': account['user']['login'],
'name': '%s %s' % (account['user']['firstName'],
account['user']['lastName'])
})
@httmock.urlmatch(scheme='https', netloc='^api.github.com$',
path='^/user/emails$', method='GET')
def mockGithubApiEmail(url, request):
try:
for account in six.viewvalues(providerInfo['accounts']):
if 'token %s' % account['access_token'] == \
request.headers['Authorization']:
break
else:
self.fail()
except AssertionError as e:
return {
'status_code': 401,
'content': json.dumps({
'message': repr(e)
})
}
return json.dumps([
{
'primary': False,
'email': '[email protected]',
'verified': True
}, {
'primary': True,
'email': account['user']['email'],
'verified': True
}
])
with httmock.HTTMock(
mockGithubRedirect,
mockGithubToken,
mockGithubApiUser,
mockGithubApiEmail,
# Must keep "mockOtherRequest" last
self.mockOtherRequest
):
self._testOauth(providerInfo)<|fim▁end|> | providerInfo['accounts'][accountType]['user']['lastName'])
# Try callback for the 'existing' account, which should succeed
doOauthLogin('existing') |
<|file_name|>chibios.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
"""
Waf tool for ChibiOS build
"""
from waflib import Errors, Logs, Task, Utils
from waflib.TaskGen import after_method, before_method, feature
import os
import shutil
import sys
import re
import pickle
_dynamic_env_data = {}
def _load_dynamic_env_data(bld):
bldnode = bld.bldnode.make_node('modules/ChibiOS')
tmp_str = bldnode.find_node('include_dirs').read()
tmp_str = tmp_str.replace(';\n','')
tmp_str = tmp_str.replace('-I','') #remove existing -I flags
# split, coping with separator
idirs = re.split('; ', tmp_str)
# create unique list, coping with relative paths
idirs2 = []
for d in idirs:
if d.startswith('../'):
# relative paths from the make build are relative to BUILDROOT
d = os.path.join(bld.env.BUILDROOT, d)
d = os.path.normpath(d)
if not d in idirs2:
idirs2.append(d)
_dynamic_env_data['include_dirs'] = idirs2
@feature('ch_ap_library', 'ch_ap_program')
@before_method('process_source')
def ch_dynamic_env(self):
# The generated files from configuration possibly don't exist if it's just
# a list command (TODO: figure out a better way to address that).<|fim▁hole|> if self.bld.cmd == 'list':
return
if not _dynamic_env_data:
_load_dynamic_env_data(self.bld)
self.use += ' ch'
self.env.append_value('INCLUDES', _dynamic_env_data['include_dirs'])
class upload_fw(Task.Task):
color='BLUE'
always_run = True
def run(self):
upload_tools = self.env.get_flat('UPLOAD_TOOLS')
src = self.inputs[0]
return self.exec_command("python '{}/px_uploader.py' '{}'".format(upload_tools, src))
def exec_command(self, cmd, **kw):
kw['stdout'] = sys.stdout
return super(upload_fw, self).exec_command(cmd, **kw)
def keyword(self):
return "Uploading"
class set_default_parameters(Task.Task):
color='CYAN'
always_run = True
def keyword(self):
return "apj_tool"
def run(self):
rel_default_parameters = self.env.get_flat('DEFAULT_PARAMETERS')
abs_default_parameters = os.path.join(self.env.SRCROOT, rel_default_parameters)
apj_tool = self.env.APJ_TOOL
sys.path.append(os.path.dirname(apj_tool))
from apj_tool import embedded_defaults
defaults = embedded_defaults(self.inputs[0].abspath())
if not defaults.find():
print("Error: Param defaults support not found in firmware")
sys.exit(1)
defaults.set_file(abs_default_parameters)
defaults.save()
class generate_bin(Task.Task):
color='CYAN'
run_str="${OBJCOPY} -O binary ${SRC} ${TGT}"
always_run = True
def keyword(self):
return "Generating"
def __str__(self):
return self.outputs[0].path_from(self.generator.bld.bldnode)
class generate_apj(Task.Task):
'''generate an apj firmware file'''
color='CYAN'
always_run = True
def keyword(self):
return "apj_gen"
def run(self):
import json, time, base64, zlib
img = open(self.inputs[0].abspath(),'rb').read()
d = {
"board_id": int(self.env.APJ_BOARD_ID),
"magic": "APJFWv1",
"description": "Firmware for a %s board" % self.env.APJ_BOARD_TYPE,
"image": base64.b64encode(zlib.compress(img,9)).decode('utf-8'),
"build_time": int(time.time()),
"summary": self.env.BOARD,
"version": "0.1",
"image_size": len(img),
"git_identity": self.generator.bld.git_head_hash(short=True),
"board_revision": 0
}
apj_file = self.outputs[0].abspath()
f = open(apj_file, "w")
f.write(json.dumps(d, indent=4))
f.close()
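# A rough sketch (not part of the build) of the inverse operation, handy when
# inspecting an .apj by hand: the firmware image is recovered by base64-decoding
# and zlib-decompressing the "image" field written above. File names here are
# illustrative only.
#
#   import json, base64, zlib
#   with open('arducopter.apj') as f:
#       apj = json.load(f)
#   img = zlib.decompress(base64.b64decode(apj['image']))
#   assert len(img) == apj['image_size']
#   open('arducopter.bin', 'wb').write(img)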
class build_abin(Task.Task):
'''build an abin file for skyviper firmware upload via web UI'''
color='CYAN'
run_str='${TOOLS_SCRIPTS}/make_abin.sh ${SRC}.bin ${SRC}.abin'
always_run = True
def keyword(self):
return "Generating"
def __str__(self):
return self.outputs[0].path_from(self.generator.bld.bldnode)
class build_intel_hex(Task.Task):
'''build an intel hex file for upload with DFU'''
color='CYAN'
run_str='${TOOLS_SCRIPTS}/make_intel_hex.py ${SRC} ${FLASH_RESERVE_START_KB}'
always_run = True
def keyword(self):
return "Generating"
def __str__(self):
return self.outputs[0].path_from(self.generator.bld.bldnode)
@feature('ch_ap_program')
@after_method('process_source')
def chibios_firmware(self):
self.link_task.always_run = True
link_output = self.link_task.outputs[0]
bin_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.bin').name)
apj_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.apj').name)
generate_bin_task = self.create_task('generate_bin', src=link_output, tgt=bin_target)
generate_bin_task.set_run_after(self.link_task)
generate_apj_task = self.create_task('generate_apj', src=bin_target, tgt=apj_target)
generate_apj_task.set_run_after(generate_bin_task)
if self.env.BUILD_ABIN:
abin_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.abin').name)
abin_task = self.create_task('build_abin', src=link_output, tgt=abin_target)
abin_task.set_run_after(generate_apj_task)
bootloader_bin = self.bld.srcnode.make_node("Tools/bootloaders/%s_bl.bin" % self.env.BOARD)
if os.path.exists(bootloader_bin.abspath()) and self.bld.env.HAVE_INTEL_HEX:
hex_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.hex').name)
hex_task = self.create_task('build_intel_hex', src=[bin_target, bootloader_bin], tgt=hex_target)
hex_task.set_run_after(generate_bin_task)
if self.env.DEFAULT_PARAMETERS:
default_params_task = self.create_task('set_default_parameters',
src=link_output)
default_params_task.set_run_after(self.link_task)
generate_bin_task.set_run_after(default_params_task)
if self.bld.options.upload:
_upload_task = self.create_task('upload_fw', src=apj_target)
_upload_task.set_run_after(generate_apj_task)
def setup_can_build(cfg):
'''enable CAN build. By doing this here we can auto-enable CAN in
the build based on the presence of CAN pins in hwdef.dat'''
env = cfg.env
env.AP_LIBRARIES += [
'AP_UAVCAN',
'modules/uavcan/libuavcan/src/**/*.cpp',
'modules/uavcan/libuavcan_drivers/stm32/driver/src/*.cpp'
]
env.CFLAGS += ['-DUAVCAN_STM32_CHIBIOS=1',
'-DUAVCAN_STM32_NUM_IFACES=2']
env.CXXFLAGS += [
'-Wno-error=cast-align',
'-DUAVCAN_STM32_CHIBIOS=1',
'-DUAVCAN_STM32_NUM_IFACES=2'
]
env.DEFINES += [
'UAVCAN_CPP_VERSION=UAVCAN_CPP03',
'UAVCAN_NO_ASSERTIONS=1',
'UAVCAN_NULLPTR=nullptr'
]
env.INCLUDES += [
cfg.srcnode.find_dir('modules/uavcan/libuavcan/include').abspath(),
cfg.srcnode.find_dir('modules/uavcan/libuavcan_drivers/stm32/driver/include').abspath()
]
cfg.get_board().with_uavcan = True
def load_env_vars(env):
'''optionally load extra environment variables from env.py in the build directory'''
print("Checking for env.py")
env_py = os.path.join(env.BUILDROOT, 'env.py')
if not os.path.exists(env_py):
print("No env.py found")
return
e = pickle.load(open(env_py, 'rb'))
for k in e.keys():
v = e[k]
if k == 'ROMFS_FILES':
env.ROMFS_FILES += v
continue
if k in env:
if isinstance(env[k], dict):
a = v.split('=')
env[k][a[0]] = '='.join(a[1:])
print("env updated %s=%s" % (k, v))
elif isinstance(env[k], list):
env[k].append(v)
print("env appended %s=%s" % (k, v))
else:
env[k] = v
print("env added %s=%s" % (k, v))
else:
env[k] = v
print("env set %s=%s" % (k, v))
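# A minimal sketch (assumed usage, not part of this tool) of producing an env.py
# that load_env_vars will pick up: the file is simply a pickled dict whose entries
# are merged into the waf env. The path and values here are illustrative only.
#
#   import pickle
#   with open('build/MyBoard/env.py', 'wb') as f:
#       pickle.dump({'BUILD_ABIN': True}, f)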
def configure(cfg):
cfg.find_program('make', var='MAKE')
#cfg.objcopy = cfg.find_program('%s-%s'%(cfg.env.TOOLCHAIN,'objcopy'), var='OBJCOPY', mandatory=True)
cfg.find_program('arm-none-eabi-objcopy', var='OBJCOPY')
env = cfg.env
bldnode = cfg.bldnode.make_node(cfg.variant)
def srcpath(path):
return cfg.srcnode.make_node(path).abspath()
def bldpath(path):
return bldnode.make_node(path).abspath()
env.AP_PROGRAM_FEATURES += ['ch_ap_program']
kw = env.AP_LIBRARIES_OBJECTS_KW
kw['features'] = Utils.to_list(kw.get('features', [])) + ['ch_ap_library']
env.CH_ROOT = srcpath('modules/ChibiOS')
env.AP_HAL_ROOT = srcpath('libraries/AP_HAL_ChibiOS')
env.BUILDDIR = bldpath('modules/ChibiOS')
env.BUILDROOT = bldpath('')
env.SRCROOT = srcpath('')
env.PT_DIR = srcpath('Tools/ardupilotwaf/chibios/image')
env.UPLOAD_TOOLS = srcpath('Tools/ardupilotwaf')
env.CHIBIOS_SCRIPTS = srcpath('libraries/AP_HAL_ChibiOS/hwdef/scripts')
env.TOOLS_SCRIPTS = srcpath('Tools/scripts')
env.APJ_TOOL = srcpath('Tools/scripts/apj_tool.py')
env.SERIAL_PORT = srcpath('/dev/serial/by-id/*_STLink*')
# relative paths to pass to make, relative to directory that make is run from
env.CH_ROOT_REL = os.path.relpath(env.CH_ROOT, env.BUILDROOT)
env.AP_HAL_REL = os.path.relpath(env.AP_HAL_ROOT, env.BUILDROOT)
env.BUILDDIR_REL = os.path.relpath(env.BUILDDIR, env.BUILDROOT)
mk_custom = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/chibios_board.mk' % env.BOARD)
mk_common = srcpath('libraries/AP_HAL_ChibiOS/hwdef/common/chibios_board.mk')
# see if there is a board specific make file
if os.path.exists(mk_custom):
env.BOARD_MK = mk_custom
else:
env.BOARD_MK = mk_common
if cfg.options.default_parameters:
cfg.msg('Default parameters', cfg.options.default_parameters, color='YELLOW')
env.DEFAULT_PARAMETERS = srcpath(cfg.options.default_parameters)
# we need to run chibios_hwdef.py at configure stage to generate the ldscript.ld
# that is needed by the remaining configure checks
import subprocess
if env.BOOTLOADER:
env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef-bl.dat' % env.BOARD)
env.BOOTLOADER_OPTION="--bootloader"
else:
env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef.dat' % env.BOARD)
env.BOOTLOADER_OPTION=""
hwdef_script = srcpath('libraries/AP_HAL_ChibiOS/hwdef/scripts/chibios_hwdef.py')
hwdef_out = env.BUILDROOT
if not os.path.exists(hwdef_out):
os.mkdir(hwdef_out)
try:
cmd = "python '{0}' -D '{1}' '{2}' {3}".format(hwdef_script, hwdef_out, env.HWDEF, env.BOOTLOADER_OPTION)
ret = subprocess.call(cmd, shell=True)
except Exception:
cfg.fatal("Failed to process hwdef.dat")
if ret != 0:
cfg.fatal("Failed to process hwdef.dat ret=%d" % ret)
load_env_vars(cfg.env)
if env.HAL_WITH_UAVCAN:
setup_can_build(cfg)
def pre_build(bld):
'''pre-build hook to change dynamic sources'''
load_env_vars(bld.env)
if bld.env.HAL_WITH_UAVCAN:
bld.get_board().with_uavcan = True
def build(bld):
bld(
# build hwdef.h from hwdef.dat. This is needed after a waf clean
source=bld.path.ant_glob(bld.env.HWDEF),
rule="python '${AP_HAL_ROOT}/hwdef/scripts/chibios_hwdef.py' -D '${BUILDROOT}' '%s' %s" % (bld.env.HWDEF, bld.env.BOOTLOADER_OPTION),
group='dynamic_sources',
target=[bld.bldnode.find_or_declare('hwdef.h'),
bld.bldnode.find_or_declare('ldscript.ld')]
)
bld(
# create the file modules/ChibiOS/include_dirs
rule="touch Makefile && BUILDDIR=${BUILDDIR_REL} CHIBIOS=${CH_ROOT_REL} AP_HAL=${AP_HAL_REL} ${CHIBIOS_BUILD_FLAGS} ${CHIBIOS_BOARD_NAME} ${MAKE} pass -f '${BOARD_MK}'",
group='dynamic_sources',
target=bld.bldnode.find_or_declare('modules/ChibiOS/include_dirs')
)
common_src = [bld.bldnode.find_or_declare('hwdef.h'),
bld.bldnode.find_or_declare('modules/ChibiOS/include_dirs')]
common_src += bld.path.ant_glob('libraries/AP_HAL_ChibiOS/hwdef/common/*.[ch]')
common_src += bld.path.ant_glob('libraries/AP_HAL_ChibiOS/hwdef/common/*.mk')
common_src += bld.path.ant_glob('modules/ChibiOS/os/hal/**/*.[ch]')
common_src += bld.path.ant_glob('modules/ChibiOS/os/hal/**/*.mk')
if bld.env.ROMFS_FILES:
common_src += [bld.bldnode.find_or_declare('ap_romfs_embedded.h')]
ch_task = bld(
# build libch.a from ChibiOS sources and hwdef.h
rule="BUILDDIR='${BUILDDIR_REL}' CHIBIOS='${CH_ROOT_REL}' AP_HAL=${AP_HAL_REL} ${CHIBIOS_BUILD_FLAGS} ${CHIBIOS_BOARD_NAME} '${MAKE}' lib -f '${BOARD_MK}'",
group='dynamic_sources',
source=common_src,
target=bld.bldnode.find_or_declare('modules/ChibiOS/libch.a')
)
ch_task.name = "ChibiOS_lib"
bld.env.LIB += ['ch']
bld.env.LIBPATH += ['modules/ChibiOS/']
wraplist = ['strerror_r', 'fclose', 'freopen', 'fread']
for w in wraplist:
bld.env.LINKFLAGS += ['-Wl,--wrap,%s' % w]<|fim▁end|> | |
<|file_name|>VirtualAudioNodeBase.ts<|end_file_name|><|fim▁begin|>import { CustomVirtualAudioNodeFactory, VirtualAudioNode } from "../types";
export default abstract class VirtualAudioNodeBase {
public readonly node!: string | CustomVirtualAudioNodeFactory;<|fim▁hole|> public cannotUpdateInPlace(newVirtualAudioNode: VirtualAudioNode): boolean {
return newVirtualAudioNode.node !== this.node;
}
}<|fim▁end|> | |
<|file_name|>table.js<|end_file_name|><|fim▁begin|>goog.provide('recoil.structs.table.ColumnKey');
goog.provide('recoil.structs.table.MutableTable');
goog.provide('recoil.structs.table.MutableTableRow');
goog.provide('recoil.structs.table.Table');
goog.provide('recoil.structs.table.TableCell');
goog.provide('recoil.structs.table.TableInterface');
goog.provide('recoil.structs.table.TableRow');
goog.provide('recoil.structs.table.TableRowInterface');
// TODO mutable/immutable versions of table and rows
// a hierarchy of rows
// changes function
// also the table widget factory produces a widget, but we need a changed function
// have a primary key, but what happens if that can change?
goog.require('goog.array');
goog.require('goog.math.Long');
goog.require('goog.structs.AvlTree');
goog.require('goog.structs.Collection');
goog.require('recoil.util.Sequence');
goog.require('recoil.util.object');
/**
* @interface
*/
recoil.structs.table.TableInterface = function() {};
/**
* this ensures the sort order, the parameters to the function are columnkey and column meta data
*
* @param {function(!recoil.structs.table.ColumnKey,!Object) : *} func
*/
recoil.structs.table.TableInterface.prototype.forEachPlacedColumn = function(func) {};
/**
* @interface
*/
recoil.structs.table.TableRowInterface = function() {};
/**
* Gets only the value from the cell
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @return {CT}
*/
recoil.structs.table.TableRowInterface.prototype.get = function(column) {};
/**
* @param {function(string,!recoil.structs.table.TableCell)} func
*/
recoil.structs.table.TableRowInterface.prototype.forEachColumn = function(func) {};
/**
* Get the value and meta data from the cell
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @return {recoil.structs.table.TableCell<CT>}
*/
recoil.structs.table.TableRowInterface.prototype.getCell = function(column) {};
/**
* @template T
* @constructor
* @param {string} name
 * @param {function(T,T) : number=} opt_comparator used for key values; only needed if > < = do not work and it is a primary key
* @param {function(*) : T=} opt_castTo
* @param {T=} opt_default
* @param {function():T=} opt_defaultFunc
*/
recoil.structs.table.ColumnKey = function(name, opt_comparator, opt_castTo, opt_default, opt_defaultFunc) {
this.name_ = name;
this.comparator_ = opt_comparator || recoil.structs.table.ColumnKey.defaultComparator_;
this.castTo_ = opt_castTo || function(x) {return x;};
this.hasDefault_ = arguments.length > 3;
this.default_ = opt_default;
this.defaultFunc_ = opt_defaultFunc;
this.id_ = recoil.structs.table.ColumnKey.nextId_.next();
};
/**
* @param {string} name
* @return {!recoil.structs.table.ColumnKey<string>}
*/
recoil.structs.table.ColumnKey.createUnique = function(name) {
var seq = new recoil.util.Sequence();
return new recoil.structs.table.ColumnKey(name, undefined, undefined, '', function() {
return seq.next();
});
};
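// A small usage sketch (NAME is an assumed column key defined elsewhere): a key
// created with createUnique supplies its own value from an internal sequence
// whenever a row is added without one.
//
//   var PK = recoil.structs.table.ColumnKey.createUnique('id');
//   var tbl = new recoil.structs.table.MutableTable([PK], [NAME]);
//   tbl.addRow(row); // a row missing PK is given the next sequence value as its key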
/**
* this function can be used to make 2 primary keys have
* the same default function, this can be useful if you want
 * to have primary keys that are unique across the two keys
*
* note this should really be called only once and before the column is
* used to generate any primary keys
*
* @param {!recoil.structs.table.ColumnKey} otherKey
*/
recoil.structs.table.ColumnKey.prototype.setSameDefaultFunc = function(otherKey) {
if (goog.isFunction(this.defaultFunc_)) {
otherKey.defaultFunc_ = this.defaultFunc_;
otherKey.hasDefault_ = true;
return;
}
throw new Error(this + ' does not have a default function');
};
/**
* @return {T}
*/
recoil.structs.table.ColumnKey.prototype.getDefault = function() {
if (goog.isFunction(this.defaultFunc_)) {
return this.defaultFunc_();
}
return this.default_;
};
/**
* @return {!recoil.structs.table.ColumnKey}
*/
recoil.structs.table.ColumnKey.prototype.clone = function() {
return this;
};
/**
* @return {boolean}
*/
recoil.structs.table.ColumnKey.prototype.hasDefault = function() {
return this.hasDefault_;
};
/**
* @type {!recoil.util.Sequence}
* @private
*/
recoil.structs.table.ColumnKey.nextId_ = new recoil.util.Sequence();
/**
 * given the primary keys, converts key values into a table row; if there is more than
 * 1 primary key this requires keys to be an array
* @param {!Array<!recoil.structs.table.ColumnKey>} primaryKeys
* @param {!Array<?>} keys
* @param {number=} opt_order
* @return {recoil.structs.table.TableRow}
*/
recoil.structs.table.ColumnKey.normalizeColumns = function(primaryKeys, keys, opt_order) {
var res = new recoil.structs.table.MutableTableRow(opt_order);
if (primaryKeys.length !== keys.length) {
throw 'incorrect number of primary keys';
}
else {
for (var i = 0; i < primaryKeys.length; i++) {
res.set(primaryKeys[i], primaryKeys[i].castTo(keys[i]));
}
}
return res.freeze();
};
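// A small sketch (COL_A and COL_B are assumed primary key columns defined elsewhere):
//
//   var keyRow = recoil.structs.table.ColumnKey.normalizeColumns(
//       [COL_A, COL_B], ['x', 7]);
//   keyRow.get(COL_A); // 'x' (after COL_A.castTo)
//
// Passing a keys array whose length differs from the primary keys throws.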
/**
* @private
* @param {*} a
* @param {*} b
* @return {number}
*/
recoil.structs.table.ColumnKey.defaultComparator_ = function(a, b) {
if (a === b) {
return 0;
}
if (a === null) {
return -1;
}
if (b === null) {
return 1;
}
if (a === undefined) {
return -1;
}
if (b === undefined) {
return 1;
}
if (typeof(a) !== typeof(b)) {
return typeof(a) < typeof(b) ? -1 : 1;
}
if (a < b) {
return -1;
}
if (b < a) {
return 1;
}
return 0;
};
/**
 * A default column key; if none is provided, index values will be assigned sequentially
* @type {!recoil.structs.table.ColumnKey<!goog.math.Long>}
*/
recoil.structs.table.ColumnKey.INDEX =
new recoil.structs.table.ColumnKey(
'index', undefined,
function(x) {
if (x instanceof goog.math.Long) {
return x;
}
return goog.math.Long.fromNumber(parseInt(x, 10));
});
/**
* A default column that is used to store meta information for the row
* @type {!recoil.structs.table.ColumnKey<*>}
*/
recoil.structs.table.ColumnKey.ROW_META = new recoil.structs.table. ColumnKey('meta', undefined, undefined);
/**
 * compares two values for this column
* @param {T} a
* @param {T} b
* @return {number}
*/
recoil.structs.table.ColumnKey.prototype.valCompare = function(a, b) {
return this.comparator_(a, b);
};
/**
* compares to values for column
* @param {T} a
* @return {number|undefined}
*/
recoil.structs.table.ColumnKey.prototype.compare = function(a) {
if (a instanceof recoil.structs.table.CombinedColumnKey) {
return -a.compare(this);
}
if (a instanceof recoil.structs.table.ColumnKey) {
return this.id_ - a.id_;
}
return undefined;
};
/**
* @return {string}
*/
recoil.structs.table.ColumnKey.prototype.getId = function() {
return this.toString();
};
/**
* @return {string}
*/
recoil.structs.table.ColumnKey.prototype.toString = function() {
return this.name_ === undefined ? this.id_ : this.name_ + ':' + this.id_;
};
/**
* @param {*} a
* @return {T}
*/
recoil.structs.table.ColumnKey.prototype.castTo = function(a) {
return this.castTo_(a);
};
/**
* @return {string}
*/
recoil.structs.table.ColumnKey.prototype.getName = function() {
return this.name_ === undefined ? ('ID(' + this.id_ + ')') : this.name_;
};
/**
*
* @param {recoil.structs.table.ColumnKey} a
* @param {recoil.structs.table.ColumnKey} b
* @return {number}
*/
recoil.structs.table.ColumnKey.comparator = function(a , b) {
if (a.id_ < b.id_) {
return -1;
}
if (a.id_ > b.id_) {
return 1;
}
return 0;
};
/**
* @template T
* @extends {recoil.structs.table.ColumnKey}
* @param {!Array<recoil.structs.table.ColumnKey>} columnKeys
* @constructor
**/
recoil.structs.table.CombinedColumnKey = function(columnKeys) {
this.name_ = columnKeys.map(function(c) {return c.getName();}).join(',');
this.subKeys_ = columnKeys;
};
/**
 * not implemented for combined keys; set the individual sub column keys to be the same instead
* @param {!recoil.structs.table.ColumnKey} otherKey
*/
recoil.structs.table.CombinedColumnKey.prototype.setSameDefaultFunc = function(otherKey) {
    throw new Error('not supported for combined keys');
};
/**
* @return {T}
*/
recoil.structs.table.CombinedColumnKey.prototype.getDefault = function() {
return this.subKeys_.map(function(c) {return c.getDefault();});
};
/**
* @return {!recoil.structs.table.ColumnKey}
*/
recoil.structs.table.CombinedColumnKey.prototype.clone = function() {
return this;
};
/**
* @return {boolean}
*/
recoil.structs.table.CombinedColumnKey.prototype.hasDefault = function() {
    return this.subKeys_.reduce(function(acc, v) {return acc && v.hasDefault();}, true);
};
/**
 * compares two values for this column
* @param {T} a
* @param {T} b
* @return {number}
*/
recoil.structs.table.CombinedColumnKey.prototype.valCompare = function(a, b) {
if (a instanceof Array && b instanceof Array) {
if (a.length === this.subKeys_.length && b.length === this.subKeys_.length) {
for (var i = 0; i < this.subKeys_.length; i++) {
var res = this.subKeys_[i].valCompare(a[i], b[i]);
if (res !== 0) {
return res;
}
}
return 0;
}
}
return recoil.util.object.compare(a, b);
};
/**
 * compares this column key to another column key
* @param {T} a
* @return {number|undefined}
*/
recoil.structs.table.CombinedColumnKey.prototype.compare = function(a) {
if (a instanceof recoil.structs.table.CombinedColumnKey) {
var res = this.subKeys_.length - a.subKeys_.length;
if (res !== 0) {
return res;
}
for (var i = 0; i < this.subKeys_.length; i++) {
res = this.subKeys_[i].compare(a.subKeys_[i]);
if (res !== 0) {
return res;
}
}
return 0;
}
if (a instanceof recoil.structs.table.ColumnKey) {
return -1;
}
return undefined;
};
/**
* @return {string}
*/
recoil.structs.table.CombinedColumnKey.prototype.equals = function() {
return this.toString();
};
/**
* @return {string}
*/
recoil.structs.table.CombinedColumnKey.prototype.getId = function() {
return this.toString();
};
/**
* @return {string}
*/
recoil.structs.table.CombinedColumnKey.prototype.toString = function() {
return '[' + this.subKeys_.map(function(c) {return c.toString();}).join(',') + ']';
};
/**
* @param {*} a
* @return {T}
*/
recoil.structs.table.CombinedColumnKey.prototype.castTo = function(a) {
var res = [];
for (var i = 0; i < this.subKeys_.length; i++) {
res.push(this.subKeys_[i].castTo(a[i]));
}
return res;
};
/**
* @return {string}
*/
recoil.structs.table.CombinedColumnKey.prototype.getName = function() {
return this.name_;
};
/**
* construct a table which cannot change, provide a mutable table to get the value
* @param {recoil.structs.table.MutableTable} table
* @constructor
* @implements {recoil.structs.table.TableInterface}
*/
recoil.structs.table.Table = function(table) {
this.meta_ = {};
goog.object.extend(this.meta_, table.meta_);
this.columnMeta_ = {};
goog.object.extend(this.columnMeta_, table.columnMeta_);
this.primaryColumns_ = table.primaryColumns_;
this.otherColumns_ = table.otherColumns_;
this.rows_ = new goog.structs.AvlTree(table.rows_.comparator_);
this.ordered_ = new goog.structs.AvlTree(recoil.structs.table.TableRow.positionComparator_(table.rows_.comparator_));
var me = this;
table.rows_.inOrderTraverse(function(x) {
me.rows_.add(x);
me.ordered_.add(x);
});
};
/**
 * very efficient way of setting the meta on a table
 * it doesn't change this table but returns a new table
* with new meta
* @param {!Object} meta
* @return {!recoil.structs.table.Table}
*/
recoil.structs.table.Table.prototype.addMeta = function(meta) {
var res = new recoil.structs.table.Table(new recoil.structs.table.MutableTable(this.primaryColumns_, this.otherColumns_));
res.meta_ = goog.object.clone(this.meta_);
recoil.util.object.addProps(res.meta_, meta);
res.columnMeta_ = this.columnMeta_;
res.rows_ = this.rows_;
res.ordered_ = this.ordered_;
return res;
};
/**
* @param {?} b
* @return {number}
*/
recoil.structs.table.Table.prototype.compare = function(b) {
if (b instanceof recoil.structs.table.Table) {
var res = this.size() - b.size();
if (res !== 0) {
return res;
}
res = recoil.util.object.compareAll([
{x: this.getMeta(), y: b.getMeta()},
{x: this.primaryColumns_, y: b.primaryColumns_},
{x: this.otherColumns_, y: b.otherColumns_},
{x: this.ordered_, y: b.ordered_}]
);
if (res !== 0) {
return res;
}
//ok we don't compare lengths since we already compared the primary and other columns
//however it might be good if we ignore order of other columns
var cols = this.getColumns();
for (var i = 0; i < cols.length; i++) {
var col = cols[i];
res = recoil.util.object.compare(this.getColumnMeta(col), b.getColumnMeta(col));
if (res !== 0) {
return res;
}
}
return 0;
}
return -1;
};
/**
* @param {?} a
* @return {boolean}
*/
recoil.structs.table.Table.prototype.equals = function(a) {
return this.compare(a) === 0;
};
/**
* @return {!Array<!recoil.structs.table.ColumnKey>}
*/
recoil.structs.table.Table.prototype.getPrimaryColumns = function() {
return this.primaryColumns_;
};
/**
* @return {!Array<!recoil.structs.table.ColumnKey>}
*/
recoil.structs.table.Table.prototype.getOtherColumns = function() {
return this.otherColumns_;
};
/**
* convert to a mutable table
* @return {!recoil.structs.table.MutableTable}
*/
recoil.structs.table.Table.prototype.unfreeze = function() {
var res = new recoil.structs.table.MutableTable(this.primaryColumns_, this.otherColumns_);
recoil.util.object.addProps(res.meta_, this.meta_);
res.columnMeta_ = {};
recoil.util.object.addProps(res.columnMeta_, this.columnMeta_);
this.rows_.inOrderTraverse(function(row) {
res.addRow(row);
});
return res;
};
/**
 * creates an empty mutable table keeping only the given non-primary columns
 * (primary key columns are always kept)
* @param {IArrayLike<!recoil.structs.table.ColumnKey>} cols
* @return {!recoil.structs.table.MutableTable}
*/
recoil.structs.table.Table.prototype.createEmptyKeep = function(cols) {
var remove = [];
var seen = {};
cols.forEach(function(col) {
seen[col.getId()] = true;
});
this.otherColumns_.forEach(function(c) {
if (!seen[c.getId()]) {
remove.push(c);
}
});
return this.createEmpty([], [], remove);
};
/**
* creates an empty mutable table with the same columns
* @param {IArrayLike<!recoil.structs.table.ColumnKey>=} opt_extPrimaryCols
* @param {IArrayLike<!recoil.structs.table.ColumnKey>=} opt_extOtherCols
* @param {IArrayLike<!recoil.structs.table.ColumnKey>=} opt_removeCols
* @return {!recoil.structs.table.MutableTable}
*/
recoil.structs.table.Table.prototype.createEmpty = function(opt_extPrimaryCols, opt_extOtherCols, opt_removeCols) {
var newPrimary = this.primaryColumns_.concat(opt_extPrimaryCols || []);
var seen = {};
var newOther = [];
// don't add already existing columns
this.otherColumns_.forEach(function(c) {
seen[c.getId()] = true;
newOther.push(c);
});
if (opt_extOtherCols) {
opt_extOtherCols.forEach(function(c) {
if (!seen[c.getId()]) {
newOther.push(c);
}
});
}
var removeMap = {};
if (opt_removeCols) {
opt_removeCols.forEach(function(c) {
removeMap[c] = true;
});
}
var doRemove = function(arr) {
for (var i = arr.length - 1; i >= 0; i--) {
if (removeMap[arr[i]]) {
arr.splice(i, 1);
}
}
};
if (opt_removeCols) {
doRemove(newPrimary);
doRemove(newOther);
}
var res = new recoil.structs.table.MutableTable(newPrimary,
newOther);
recoil.util.object.addProps(res.meta_, this.meta_);
res.columnMeta_ = {};
recoil.util.object.addProps(res.columnMeta_, this.columnMeta_);
if (opt_removeCols) {
opt_removeCols.forEach(function(col) {
delete res.columnMeta_[col];
});
}
return res;
};
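// Usage sketch (EXTRA and OLD_COL are assumed column keys): the result keeps this
// table's columns and meta data but contains no rows.
//
//   var mtable = table.createEmpty();                        // same columns, no rows
//   var mtable2 = table.createEmpty([], [EXTRA], [OLD_COL]); // add EXTRA, drop OLD_COL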
/**
* creates an empty mutable table based on a table, that will a have all original columns
* but the primary keys will be the ones specified
* @param {!Array<!recoil.structs.table.ColumnKey>} primaryCols the new primary keys these can be new or existing
* @param {!Array<!recoil.structs.table.ColumnKey>} extOtherCols any extra columns that need to be added that are
* not primary keys
* @return {!recoil.structs.table.MutableTable}
*/
recoil.structs.table.Table.prototype.createEmptyAddCols = function(primaryCols, extOtherCols) {
var otherColumns = [];
var me = this;
this.primaryColumns_.forEach(function(val) {
if (!goog.array.contains(primaryCols, val)) {
otherColumns.push(val);
}
});
this.otherColumns_.forEach(function(val) {
if (!goog.array.contains(primaryCols, val)) {
otherColumns.push(val);
}
});
extOtherCols.forEach(function(val) {
if (!goog.array.contains(otherColumns, val)) {
otherColumns.push(val);
}
});
var res = new recoil.structs.table.MutableTable(primaryCols, otherColumns);
recoil.util.object.addProps(res.meta_, this.meta_);
res.columnMeta_ = {};
recoil.util.object.addProps(res.columnMeta_, this.columnMeta_);
return res;
};
/**
 * given that this table has a primary key that is a number,
 * it will generate a mutable table row with a primary key not already in the table;
 * also, if all the existing rows have a position then the position of the new row will
 * be after the last row
*
 * @param {!recoil.structs.table.Table|!recoil.structs.table.MutableTable} table
*
* @return {!recoil.structs.table.MutableTableRow}
*/
recoil.structs.table.Table.createUniqueIntPkRow = function(table) {
var primaryCols = table.getPrimaryColumns();
if (primaryCols.length !== 1) {
throw 'to generate pk you must have exactly 1 primary key';
}
var res = new recoil.structs.table.MutableTableRow();
var pos = 0;
var usedPks = new goog.structs.AvlTree();
table.forEach(function(row, key) {
if (pos !== undefined) {
var rowPos = row.pos();
if (rowPos === undefined) {
pos = undefined;
}
else if (pos < rowPos) {
pos = rowPos + 1;
}
}
if (typeof(key[0]) !== 'number') {
throw 'cannot generate primary key on non number field';
}
usedPks.add(key[0]);
});
var curPk = 0;
usedPks.inOrderTraverse(function(val) {
if (curPk < val) {
return true;
}
if (curPk === val) {
curPk++;
}
return false;
});
res.set(primaryCols[0], curPk);
res.setPos(pos);
return res;
};
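// Usage sketch (NAME is an assumed non-key column): for a table whose single
// primary key is numeric, this picks the smallest unused integer key and, when
// every existing row has a position, places the new row after the last one.
//
//   var newRow = recoil.structs.table.Table.createUniqueIntPkRow(mtable);
//   newRow.set(NAME, 'fred');
//   mtable.addRow(newRow);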
/**
*
* @param {!Array<!recoil.structs.table.ColumnKey>} primaryKeys
* @param {!Array<!recoil.structs.table.ColumnKey>} otherColumns
* @constructor
* @implements {recoil.structs.table.TableInterface}
* @template T
*
*/
recoil.structs.table.MutableTable = function(primaryKeys, otherColumns) {
this.meta_ = {}; // table meta data
this.columnMeta_ = {}; // column meta data
if (primaryKeys.length === 0) {
this.primaryColumns_ = [recoil.structs.table.ColumnKey.INDEX];
}
else {
this.primaryColumns_ = goog.array.clone(primaryKeys);
}
this.otherColumns_ = goog.array.clone(otherColumns);
var me = this;
var comparator = function(rowA, rowB) {
for (var key = 0; key < me.primaryColumns_.length; key++) {
var col = me.primaryColumns_[key];
var res = col.valCompare(rowA.get(col), rowB.get(col));
if (res !== 0) {
return res;
}
}
return 0;
};
this.rows_ = new goog.structs.AvlTree(comparator);
//recoil.structs.table.TableRow.positionComparator_ = function (comparator) {
this.ordered_ = new goog.structs.AvlTree(recoil.structs.table.TableRow.positionComparator_(comparator));
};
/**
* @return {recoil.structs.table.TableRow}
*/
recoil.structs.table.MutableTable.prototype.getFirstRow = function() {
var res = null;
this.forEach(function(row) {
if (!res) {
res = row;
}
});
return res;
};
/**
* @return {!Array<!recoil.structs.table.ColumnKey<*>>}
*/
recoil.structs.table.MutableTable.prototype.getColumns = function() {
return goog.array.concat(this.primaryColumns_, this.otherColumns_);
};
/**
* @return {?} a more readable version of the table
*/
recoil.structs.table.MutableTable.prototype.toDebugObj = function() {
var tableOut = [];
var behaviour = this;
this.forEach(function(row) {
tableOut.push(row);
});
return {meta: this.meta_, colMeta: this.columnMeta_, tbl: tableOut};
};
/**
* @return {!Array<!recoil.structs.table.ColumnKey<*>>}
*/
recoil.structs.table.MutableTable.prototype.getPrimaryColumns = function() {
return this.primaryColumns_;
};
/**
* @return {!Array<!recoil.structs.table.ColumnKey>}
*/
recoil.structs.table.MutableTable.prototype.getOtherColumns = function() {
return this.otherColumns_;
};
/**
* this ensures the sort order, the parameters to the function are columnkey and column meta data
*
* @param {function(!recoil.structs.table.ColumnKey,!Object) : *} func
*/
recoil.structs.table.MutableTable.prototype.forEachPlacedColumn = function(func) {
var cols = [];
var me = this;
var addCol = function(key) {
var col = me.columnMeta_[key.getId()];
if (col && col.position !== undefined) {
cols.push({meta: col, key: key});
}
};
this.primaryColumns_.forEach(addCol);
this.otherColumns_.forEach(addCol);
goog.array.sort(cols, function(x, y) {
return x.meta.position - y.meta.position;
});
cols.forEach(function(col) {
func(col.key, col.meta);
});
};
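// Usage sketch (NAME and AGE are assumed column keys): only columns whose meta
// data contains a numeric 'position' are visited, in ascending position order.
//
//   mtable.addColumnMeta(NAME, {position: 0});
//   mtable.addColumnMeta(AGE, {position: 1});
//   mtable.forEachPlacedColumn(function(key, meta) {
//       console.log(key.getName(), meta.position);
//   });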
/**
* @param {function(!recoil.structs.table.ColumnKey,!Object) : *} func
*/
recoil.structs.table.MutableTable.prototype.forEachColumn = function(func) {
var cols = [];
var me = this;
var addCol = function(key) {
var col = me.columnMeta_[key.getId()];
cols.push({meta: col, key: key});
};
this.primaryColumns_.forEach(addCol);
this.otherColumns_.forEach(addCol);
cols.forEach(function(col) {
if (col.key !== recoil.structs.table.ColumnKey.ROW_META) {
func(col.key, col.meta || {});
}
});
};
/**
* @param {!recoil.structs.table.MutableTableRow|recoil.structs.table.TableRow} row
* @return {!Array<?>}
*/
recoil.structs.table.MutableTable.prototype.getRowKey = function(row) {
var res = [];
for (var i = 0; i < this.primaryColumns_.length; i++) {
res.push(row.get(this.primaryColumns_[i]));
}
return res;
};
/**
* gets the number of rows in the table
* @return {number}
*/
recoil.structs.table.MutableTable.prototype.size = function() {
return this.rows_.getCount();
};
/**
* @param {Object} a
* @param {Object} b
* @return {number}
*/
recoil.structs.table.Table.comparator = function(a, b) {
return recoil.structs.table.ColumnKey.comparator(a.key, b.key);
};
/**
 * set the table meta data, this will replace all existing meta
 * data
* @param {!Object} meta
*/
recoil.structs.table.MutableTable.prototype.setMeta = function(meta) {
this.meta_ = goog.object.clone(meta);
};
/**
* get table meta data
* @return {!Object}
*/
recoil.structs.table.MutableTable.prototype.getMeta = function() {
return this.meta_;
};
/**
* add meta data to already existing meta data, this may override existing meta
* data
* @param {!Object} meta
*/
recoil.structs.table.MutableTable.prototype.addMeta = function(meta) {
var newMeta = goog.object.clone(this.meta_);
recoil.util.object.addProps(newMeta, this.meta_, meta);
this.meta_ = goog.object.clone(newMeta);
};
/**
* get column meta data
* @param {recoil.structs.table.ColumnKey} key
* @return {!Object}
*/
recoil.structs.table.MutableTable.prototype.getColumnMeta = function(key) {
var res = this.columnMeta_[key];
if (res === undefined) {
return {};
}
return res;
};
/**
 * set new column meta data, replacing all existing meta data
 * for that column
* @param {!recoil.structs.table.ColumnKey} key
* @param {!Object} meta
*/
recoil.structs.table.MutableTable.prototype.setColumnMeta = function(key, meta) {
this.columnMeta_[key] = goog.object.clone(meta);
};
/**
* add new column meta data leaving already existing meta data there
* if it is not overriden by the new meta data
* @param {!recoil.structs.table.ColumnKey} key
* @param {!Object|{position:number}} meta
*/
recoil.structs.table.MutableTable.prototype.addColumnMeta = function(key, meta) {
var newMeta = goog.object.clone(this.getColumnMeta(key));
for (var field in meta) {
newMeta[field] = meta[field];
}
this.setColumnMeta(key, newMeta);
};
/**
* get row meta data
* @param {!Array<?>} keys
* @return {!Object}
*/
recoil.structs.table.MutableTable.prototype.getRowMeta = function(keys) {
var row = this.getRow(keys);
if (row === null) {
return {};
}
return row.getMeta();
};
/**
 * set new row meta data, replacing all existing meta data
 * for that row
* @param {!Array<?>} keys
* @param {!Object} meta
*/
recoil.structs.table.MutableTable.prototype.setRowMeta = function(keys, meta) {
this.setCell(
keys,
recoil.structs.table.ColumnKey.ROW_META,
new recoil.structs.table.TableCell(undefined, meta));
};
/**
 * add new row meta data, leaving already existing meta data there
 * if it is not overridden by the new meta data
* @param {!Array<*>} keys
* @param {!Object} meta
*/
recoil.structs.table.MutableTable.prototype.addRowMeta = function(keys, meta) {
var newMeta = {};
recoil.util.object.addProps(newMeta, this.getRowMeta(keys), meta);
this.setRowMeta(keys, newMeta);
};
/**
 * this uses the primary key of the row to insert the row into the table
*
* @param {recoil.structs.table.TableRow<T> | recoil.structs.table.MutableTableRow<T>} row
*/
recoil.structs.table.MutableTable.prototype.addRow = function(row) {
var missingKeys = [];
if (row instanceof recoil.structs.table.MutableTableRow) {
row = row.freeze();
}
this.primaryColumns_.forEach(function(col) {
if (!row.hasColumn(col)) {
if (col.hasDefault()) {
row = row.set(col, col.getDefault());
}
else {
missingKeys.push(col);
}
}
});
this.otherColumns_.forEach(function(col) {
if (!row.hasColumn(col)) {
if (col.hasDefault()) {
row = row.set(col, col.getDefault());
}
else {
throw new Error('missing column: ' + col.getName());
}
}
});
if (missingKeys.length === 1
&& this.primaryColumns_.length === 1
&& this.primaryColumns_[0] === recoil.structs.table.ColumnKey.INDEX) {
var nextId;
if (this.rows_.getCount() === 0) {
nextId = goog.math.Long.getZero();
}
else {
nextId = this.rows_.getMaximum().get(recoil.structs.table.ColumnKey.INDEX).add(goog.math.Long.getOne());
}
row = row.set(recoil.structs.table.ColumnKey.INDEX, nextId);
}
else if (missingKeys.length > 0) {
throw 'Must specify All primary keys';
}
var tblRow = row.keepColumns(goog.array.concat(this.primaryColumns_, this.otherColumns_));
if (this.rows_.findFirst(tblRow) !== null) {
throw new Error('row already exists ');
}
this.rows_.add(tblRow);
this.ordered_.add(tblRow);
};
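// Usage sketch (PK and NAME are assumed column keys): every column of the table
// must either be present on the row or have a default, and a duplicate primary
// key throws.
//
//   var r = new recoil.structs.table.MutableTableRow();
//   r.set(PK, 1);
//   r.set(NAME, 'fred');
//   mtable.addRow(r); // mutable rows are frozen as they are inserted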
/**
* @private
* @param {Array<*>} keys
* @return {recoil.structs.table.TableRow} the key as a row so it can be used to lookup the value in the map
*/
recoil.structs.table.MutableTable.prototype.makeKeys_ = function(keys) {
if (keys.length !== this.primaryColumns_.length) {
throw 'Incorrect number of primary keys';
}
var row = new recoil.structs.table.MutableTableRow();
for (var i = 0; i < keys.length; i++) {
row.set(this.primaryColumns_[i], keys[i]);
}
return row.freeze();
};
/**
* returns an array of keys for the row
* @param {!recoil.structs.table.TableRow} row
* @return {!Array<?>}
*/
recoil.structs.table.MutableTable.prototype.getRowKeys = function(row) {
var keys = [];
for (var i = 0; i < this.primaryColumns_.length; i++) {
keys.push(row.get(this.primaryColumns_[i]));
}
return keys;
};
/**
*
 * @param {function(!recoil.structs.table.TableRow,!Array<?>,Object) : *} func the first parameter is the row, the second is
 * the primary key, and the third is the row meta data
*/
recoil.structs.table.MutableTable.prototype.forEach = function(func) {
var me = this;
var list = [];
    //construct a list first just in case we modify the
//table while iterating over it
this.ordered_.inOrderTraverse(function(row) {
list.push(row);
});
list.forEach(function(row) {
return func(row, me.getRowKeys(row), row.getMeta());
});
//var table = this.freeze();
//table.forEach(func);
};
/**
 * like forEach but passes a mutable copy of each row, since this is needed often
 * @param {function(!recoil.structs.table.MutableTableRow,!Array<?>,Object) : *} func the first parameter is the row, the second is
 * the primary key, and the third is the row meta data
*/
recoil.structs.table.MutableTable.prototype.forEachModify = function(func) {
var me = this;
var list = [];
    //construct a list first just in case we modify the
//table while iterating over it
this.ordered_.inOrderTraverse(function(row) {
list.push(row.unfreeze());
});
list.forEach(function(row) {
return func(row, me.getRowKeys(row), row.getMeta());
});
};
/**
* removes the row identified by the given primary keys from the table
*
* @param {Array<*>} keys
*
*/
recoil.structs.table.MutableTable.prototype.removeRow = function(keys) {
var oldRow = this.rows_.remove(this.makeKeys_(keys));
if (oldRow === null) {
throw 'Row does not exist';
}
else {
this.ordered_.remove(oldRow);
}
};
/**
* gets the row from a table, pass the primary keys as an array of values
* @param {!Array<?>} keys
* @return {recoil.structs.table.TableRow}
*/
recoil.structs.table.MutableTable.prototype.getRow = function(keys) {
var keyRow = recoil.structs.table.ColumnKey.normalizeColumns(this.primaryColumns_, keys);
return this.rows_.findFirst(keyRow);
};
/**
* finds a row that satisfies the compare function, or returns null if none matches
* @param {function(!recoil.structs.table.TableRow):boolean} compare
* @return {recoil.structs.table.TableRow}
*/
recoil.structs.table.MutableTable.prototype.findRow = function(compare) {
let res = null;
this.forEach(function(row) {
if (compare(row)) {
res = row;
}
});
return res;
};
/**
* Sets the value for the cell
* @template CT
* @param {!Array<?>} keys
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @param {CT} value
* @param {Object=} opt_meta
*/
recoil.structs.table.MutableTable.prototype.set = function(keys, column, value, opt_meta) {
var old = this.getCell(keys, column);
if (old === null) {
throw 'Cell Does not exist';
}
if (opt_meta) {
this.setCell(keys, column, new recoil.structs.table.TableCell(value, opt_meta));
}
else {
this.setCell(keys, column, old.setValue(value));
}
};
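/**
 * Illustrative usage sketch (editorial addition, not part of the original recoil
 * source): reading and writing a single cell by primary key; keys are always passed
 * as an array. The ColumnKey constructor calls are assumptions based on how keys are
 * created elsewhere in this library.
 */
function exampleSetAndGet() {
    var idCol = new recoil.structs.table.ColumnKey('id');
    var nameCol = new recoil.structs.table.ColumnKey('name');
    var tbl = new recoil.structs.table.MutableTable([idCol], [nameCol]);
    tbl.addRow(recoil.structs.table.TableRow.create(idCol, 1, nameCol, 'first'));
    tbl.set([1], nameCol, 'renamed');  // replaces just the value, keeping the cell meta
    return tbl.get([1], nameCol);      // -> 'renamed'
}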
/**
* Sets the value and meta data for the cell
* @template CT
* @param {!Array<?>} keys
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @param {CT} value
*/
recoil.structs.table.MutableTable.prototype.setCell = function(keys, column, value) {
var row = this.getRow(keys);
if (row === null) {
throw 'row not found';
}
this.removeRow(keys);
this.addRow(row.setCell(column, value));
};
/**
* sets only the cell meta data, leaving the value unchanged
* @template CT
* @param {!Array<?>} keys
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @param {!Object} meta
*/
recoil.structs.table.MutableTable.prototype.setCellMeta = function(keys, column, meta) {
var cell = this.getCell(keys, column);
if (cell === null) {
console.log('setting null');
}
this.setCell(keys, column, cell.setMeta(meta));
};
/**
* adds meta data to the cell
* @template CT
* @param {!Array<?>} keys
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @param {!Object} meta
*/
recoil.structs.table.MutableTable.prototype.addCellMeta = function(keys, column, meta) {
var cell = this.getCell(keys, column);
var newMeta = cell.getMeta();
recoil.util.object.addProps(newMeta, meta);
this.setCell(keys, column, cell.setMeta(newMeta));
};
/**
* replaces the row at the given primary keys with the supplied row
* @param {!Array<?>} keys
* @param {!recoil.structs.table.TableRow|!recoil.structs.table.MutableTableRow} row
*/
recoil.structs.table.MutableTable.prototype.setRow = function(keys, row) {
var oldRow = this.getRow(keys);
if (oldRow === null) {
throw 'row not found';
}
this.removeRow(keys);
this.addRow(row);
<|fim▁hole|>/**
* @template CT
* @param {!Array<?>} keys
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @return {recoil.structs.table.TableCell<CT>}
*/
recoil.structs.table.MutableTable.prototype.getCell = function(keys, column) {
var row = this.getRow(keys);
if (row === null) {
return null;
}
return row.getCell(column);
};
/**
* @template CT
* @param {!Array<?>} keys
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @return {CT}
*/
recoil.structs.table.MutableTable.prototype.get = function(keys, column) {
var row = this.getRow(keys);
if (row === null) {
return null;
}
return row.get(column);
};
/**
* convert into immutable table
* @return {!recoil.structs.table.Table}
*/
recoil.structs.table.MutableTable.prototype.freeze = function() {
return new recoil.structs.table.Table(this);
};
/**
* gets the value of a cell in the table, without the meta information
* @template CT
* @param {!Array<?>} keys primary key of the row
* @param {recoil.structs.table.ColumnKey<CT>} columnKey
* @return {CT}
*/
recoil.structs.table.Table.prototype.get = function(keys, columnKey) {
var r = this.getRow(keys);
if (r == null) {
return null;
}
return r.get(columnKey);
};
/**
* @return {!Object}
*/
recoil.structs.table.Table.prototype.getMeta = function() {
return recoil.util.object.clone(this.meta_);
};
/**
* @template CT
* @param {recoil.structs.table.ColumnKey<CT>} column
* @return {!Object} +
*/
recoil.structs.table.Table.prototype.getColumnMeta = function(column) {
var res = this.columnMeta_[column];
if (res === undefined) {
return {};
}
return res;
};
/**
* @template CT
* @param {!Array<?>} keys
* @param {recoil.structs.table.ColumnKey<CT>} column
* @return {!Object}
*/
recoil.structs.table.Table.prototype.getRowMeta = function(keys, column) {
var row = this.getRow(keys);
if (row === null) {
return {};
}
return row.getMeta();
};
/**
* returns an array of keys for the row
* @param {!recoil.structs.table.TableRow|!recoil.structs.table.MutableTableRow} row
* @return {!Array<?>}
*/
recoil.structs.table.Table.prototype.getRowKeys = function(row) {
var keys = [];
for (var i = 0; i < this.primaryColumns_.length; i++) {
keys.push(row.get(this.primaryColumns_[i]));
}
return keys;
};
/**
*
* @param {function(!recoil.structs.table.TableRow, !Array<?>, Object) : *} func
*/
recoil.structs.table.Table.prototype.forEach = function(func) {
var me = this;
this.ordered_.inOrderTraverse(function(row) {
return func(row, me.getRowKeys(row), row.getMeta());
});
};
/**
*
* @param {function(!recoil.structs.table.MutableTableRow, !Array<?>, Object) : *} func
*/
recoil.structs.table.Table.prototype.forEachModify = function(func) {
var me = this;
this.ordered_.inOrderTraverse(function(row) {
return func(row.unfreeze(), me.getRowKeys(row), row.getMeta());
});
};
/**
* @return {!Array<!recoil.structs.table.ColumnKey<*>>}
*/
recoil.structs.table.Table.prototype.getKeyColumns = function() {
return this.primaryColumns_;
};
/**
* @return {!Array<!recoil.structs.table.ColumnKey<*>>}
*/
recoil.structs.table.Table.prototype.getColumns = function() {
return goog.array.concat(this.primaryColumns_, this.otherColumns_);
};
/**
* this ensures the sort order, the parameters to the function are columnkey and column meta data
*
* @param {function(!recoil.structs.table.ColumnKey,!Object) : *} func
*/
recoil.structs.table.Table.prototype.forEachPlacedColumn = function(func) {
var cols = [];
var me = this;
var addCol = function(key) {
var col = me.columnMeta_[key.getId()];
if (col && col.position !== undefined) {
cols.push({meta: col, key: key});
}
};
this.primaryColumns_.forEach(addCol);
this.otherColumns_.forEach(addCol);
goog.array.sort(cols, function(x, y) {
return x.meta.position - y.meta.position;
});
cols.forEach(function(col) {
func(col.key, col.meta);
});
};
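/**
 * Illustrative usage sketch (editorial addition, not part of the original recoil
 * source): only columns whose column meta contains a 'position' value are visited by
 * forEachPlacedColumn, in ascending position order; positions are normally assigned
 * through setColumnMeta on the mutable table.
 */
function examplePlacedColumns(table) {
    var placed = [];
    table.forEachPlacedColumn(function(key, meta) {
        placed.push({key: key, position: meta.position});
    });
    return placed;
}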
/**
* @return {?} a more readable version
*/
recoil.structs.table.Table.prototype.toDebugObj = function() {
var tableOut = [];
this.forEach(function(row) {
tableOut.push(row);
});
return {meta: this.meta_, colMeta: this.columnMeta_, tbl: tableOut};
};
/**
* @param {function(!recoil.structs.table.ColumnKey,!Object) : *} func
*/
recoil.structs.table.Table.prototype.forEachColumn = function(func) {
var cols = [];
var me = this;
var addCol = function(key) {
var col = me.columnMeta_[key.getId()];
cols.push({meta: col, key: key});
};
this.primaryColumns_.forEach(addCol);
this.otherColumns_.forEach(addCol);
cols.forEach(function(col) {
func(col.key, col.meta || {});
});
};
/**
* gets the number of rows in the table
* @return {number}
*/
recoil.structs.table.Table.prototype.size = function() {
return this.rows_.getCount();
};
/**
* gets the row from a table, pass the primary keys as an array of values
* @param {!Array<?>} keys
* @return {recoil.structs.table.TableRow}
*/
recoil.structs.table.Table.prototype.getRow = function(keys) {
var keyRow = recoil.structs.table.ColumnKey.normalizeColumns(this.primaryColumns_, keys);
return this.rows_.findFirst(keyRow);
};
/**
* finds a row that satisfies the compare function, or returns null if none matches
* @param {function(!recoil.structs.table.TableRow):boolean} compare
* @return {recoil.structs.table.TableRow}
*/
recoil.structs.table.Table.prototype.findRow = function(compare) {
let res = null;
this.forEach(function(row) {
if (compare(row)) {
res = row;
}
});
return res;
};
/**
* get cell value with its associated meta information
*
* @template CT
* @param {!Array<?>} keys
* @param {!recoil.structs.table.ColumnKey<CT>} columnKey
* @return {recoil.structs.table.TableCell<CT>} an object containing the meta data and a value
*/
recoil.structs.table.Table.prototype.getCell = function(keys, columnKey) {
var rowInfo = this.getRow(keys);
if (rowInfo) {
return rowInfo.getCell(columnKey);
}
return null;
};
/**
* gets the cell meta including the column, table and row values
* @template CT
* @param {!Array<?>} keys
* @param {!recoil.structs.table.ColumnKey<CT>} col
* @return {!Object}
*/
recoil.structs.table.Table.prototype.getFullCellMeta = function(keys, col) {
var row = this.getRow(keys);
if (row) {
var meta = {};
goog.object.extend(meta, this.getMeta(),
row.getRowMeta(),
this.getColumnMeta(col), row.getCellMeta(col));
return meta;
}
return this.getMeta();
};
/**
* @return {recoil.structs.table.TableRow}
*/
recoil.structs.table.Table.prototype.getFirstRow = function() {
var res = null;
this.forEach(function(row) {
if (!res) {
res = row;
}
});
return res;
};
/**
*
* @param {Object} typeFactories
* @param {Object} tableMeta
* @param {Array<Object>} rawTable
* @param {boolean=} opt_ordered if true then the order rows appear in rawTable is preserved
* @return {recoil.structs.table.Table}
*/
recoil.structs.table.Table.create = function(typeFactories, tableMeta, rawTable, opt_ordered) {
var keys = recoil.structs.table.Table.extractKeys_(tableMeta);
var tbl = new recoil.structs.table.MutableTable(keys.primaryKeys, keys.otherKeys);
tbl.setMeta({'typeFactories': typeFactories});
for (var tMeta in tableMeta) {
var colKey = tableMeta[tMeta].key;
tbl.setColumnMeta(colKey, tableMeta[tMeta]);
}
var i = 0;
rawTable.forEach(function(item) {
var row = new recoil.structs.table.MutableTableRow(opt_ordered === true ? i : undefined);
for (var tMeta in tableMeta) {
var colKey = tableMeta[tMeta].key;
row.set(colKey, item[tMeta]);
}
tbl.addRow(row);
i++;
});
return tbl.freeze();
};
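/**
 * Illustrative usage sketch (editorial addition, not part of the original recoil
 * source): Table.create builds a table from a meta description whose entries carry a
 * 'key' ColumnKey and, for primary keys, a 'primary' ordering number. The ColumnKey
 * constructor calls and the empty typeFactories object are assumptions for this sketch.
 */
function exampleCreateTable() {
    var idCol = new recoil.structs.table.ColumnKey('id');
    var nameCol = new recoil.structs.table.ColumnKey('name');
    var tableMeta = {
        id: {key: idCol, primary: 0},
        name: {key: nameCol}
    };
    var raw = [{id: 1, name: 'first'}, {id: 2, name: 'second'}];
    return recoil.structs.table.Table.create({}, tableMeta, raw, true);
}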
/**
*
* @param {Object} tableMeta
* @return {Object}
* @private
*/
recoil.structs.table.Table.extractKeys_ = function(tableMeta) {
var primaryKeys = [];
var otherKeys = [];
for (var obj in tableMeta) {
if (tableMeta.hasOwnProperty(obj)) {
var val = tableMeta[obj];
if (val.hasOwnProperty('primary')) {
primaryKeys.push(val);
}
else {
otherKeys.push(val.key);
}
}
}
/**
* @suppress {missingProperties}
* @param {?} a
* @param {?} b
* @return {number}
*/
var comp = function(a, b) {
return a.primary - b.primary;
};
primaryKeys.sort(comp);
return {primaryKeys: recoil.structs.table.Table.getColumnKeys_(primaryKeys),
otherKeys: otherKeys};
};
/**
* @private
* @param {Array<Object>} array
* @return {!Array<!recoil.structs.table.ColumnKey>}
*/
recoil.structs.table.Table.getColumnKeys_ = function(array) {
var res = [];
for (var i = 0; i < array.length; i++) {
res.push(array[i].key);
}
return res;
};
/**
* @param {!recoil.structs.table.MutableTableRow=} opt_tableRow
* @constructor
* @implements {recoil.structs.table.TableRowInterface}
*/
recoil.structs.table.TableRow = function(opt_tableRow) {
this.cells_ = {};
this.cells_[recoil.structs.table.ColumnKey.ROW_META] = new recoil.structs.table.TableCell(undefined, {});
this.pos_ = undefined;
if (opt_tableRow !== undefined) {
for (var key in opt_tableRow.orig_) {
this.cells_[key] = opt_tableRow.orig_[key];
}
for (key in opt_tableRow.changed_) {
this.cells_[key] = opt_tableRow.changed_[key];
}
this.pos_ = opt_tableRow.pos();
}
};
/**
* @private
* @param {function (!recoil.structs.table.TableRow, !recoil.structs.table.TableRow):number} comparator
* @return {function (!recoil.structs.table.TableRow, !recoil.structs.table.TableRow):number}
*/
recoil.structs.table.TableRow.positionComparator_ = function(comparator) {
return function(x, y) {
if (x.pos() === undefined && y.pos() === undefined) {
return comparator(x, y);
}
if (x.pos() === undefined || y.pos() === undefined) {
return x.pos() === undefined ? -1 : 1;
}
var res = x.pos() - y.pos();
if (res === 0) {
return comparator(x, y);
}
return res;
};
};
/**
* checks to see if the values are equal ignoring meta data
* @param {?} that
* @return {boolean}
*/
recoil.structs.table.TableRow.prototype.valuesEqual = function(that) {
if (!(that instanceof recoil.structs.table.TableRow)) {
return false;
}
var equal = true;
var me = this;
this.forEachColumn(function(col, cell) {
if (!that.cells_.hasOwnProperty(col) || !that.cells_[col]) {
equal = false;
return true;
}
if (!recoil.util.object.isEqual(cell.getValue(), that.cells_[col].getValue())) {
equal = false;
return true;
}
return false;
});
that.forEachColumn(function(col, cell) {
if (!me.cells_.hasOwnProperty(col)) {
equal = false;
return true;
}
return false;
});
return equal;
};
/**
* @return {number|undefined}
*/
recoil.structs.table.TableRow.prototype.pos = function() {
return this.pos_;
};
/**
* Get the value and meta data from the cell
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @return {recoil.structs.table.TableCell<CT>}
*/
recoil.structs.table.TableRow.prototype.getCell = function(column) {
var res = this.cells_[column];
return res === undefined ? null : res;
};
/**
* Get the meta data from the cell
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @return {!Object}
*/
recoil.structs.table.TableRow.prototype.getCellMeta = function(column) {
var res = this.getCell(column);
return res ? res.getMeta() : {};
};
/**
* Get the row meta data for this row
* @template CT
* @return {!Object}
*/
recoil.structs.table.TableRow.prototype.getMeta = function() {
var res = this.cells_[recoil.structs.table.ColumnKey.ROW_META];
return res === undefined ? {} : res.getMeta();
};
/**
* @param {function(string,!recoil.structs.table.TableCell)} func
*/
recoil.structs.table.TableRow.prototype.forEachColumn = function(func) {
var metaCol = recoil.structs.table.ColumnKey.ROW_META.toString();
for (var col in this.cells_) {
if (metaCol !== col) {
if (func(col, this.cells_[col])) {
return;
}
}
}
};
/**
* Gets only the value from the cell
* @template CT
* @param {recoil.structs.table.ColumnKey<CT>} column
* @return {CT}
*/
recoil.structs.table.TableRow.prototype.get = function(column) {
var val = this.cells_[column];
return val === undefined ? null : val.getValue();
};
/**
* sets the cell and returns a new row, this row is unmodified
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @param {CT} value
* @return {!recoil.structs.table.TableRow}
*/
recoil.structs.table.TableRow.prototype.set = function(column, value) {
var mutable = new recoil.structs.table.MutableTableRow(this.pos(), this);
mutable.set(column, value);
return mutable.freeze();
};
/**
* sets the cell and returns a new row, this row is unmodified
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @param {!recoil.structs.table.TableCell<CT>} value
* @return {recoil.structs.table.TableRow}
*/
recoil.structs.table.TableRow.prototype.setCell = function(column, value) {
var mutable = new recoil.structs.table.MutableTableRow(this.pos(), this);
mutable.setCell(column, value);
return mutable.freeze();
};
/**
* @param {...*} var_args
* @return {recoil.structs.table.MutableTableRow}
*/
recoil.structs.table.TableRow.create = function(var_args) {
var mutableRow = new recoil.structs.table.MutableTableRow();
for (var i = 0; i < arguments.length; i += 2) {
mutableRow.set(arguments[i], arguments[i + 1]);
}
return mutableRow;
};
/**
* @param {number} pos the position of the row
* @param {...*} var_args
* @return {recoil.structs.table.MutableTableRow}
*/
recoil.structs.table.TableRow.createOrdered = function(pos, var_args) {
var mutableRow = new recoil.structs.table.MutableTableRow(pos);
for (var i = 1; i < arguments.length; i += 2) {
mutableRow.set(arguments[i], arguments[i + 1]);
}
return mutableRow;
};
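/**
 * Illustrative usage sketch (editorial addition, not part of the original recoil
 * source): TableRow.create takes alternating ColumnKey/value arguments and returns a
 * mutable row; freeze() converts it to an immutable TableRow. The ColumnKey
 * constructor calls are assumptions based on how keys are created elsewhere.
 */
function exampleCreateRow() {
    var idCol = new recoil.structs.table.ColumnKey('id');
    var nameCol = new recoil.structs.table.ColumnKey('name');
    var row = recoil.structs.table.TableRow.create(idCol, 1, nameCol, 'first');
    return row.freeze();
}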
/**
* @return {!Object}
*/
recoil.structs.table.TableRow.prototype.getRowMeta = function() {
var cell = this.getCell(recoil.structs.table.ColumnKey.ROW_META);
return cell ? cell.getMeta() : {};
};
/**
* removes all columns not in the columns parameter
* @param {Array<!recoil.structs.table.ColumnKey>} columns
* @return {!recoil.structs.table.TableRow}
*/
recoil.structs.table.TableRow.prototype.keepColumns = function(columns) {
var mutable = new recoil.structs.table.MutableTableRow(this.pos_);
var me = this;
columns.forEach(function(col) {
if (me.hasColumn(col)) {
var val = me.getCell(col);
if (val !== null) {
mutable.setCell(col, val);
}
}
});
if (me.hasColumn(recoil.structs.table.ColumnKey.ROW_META)) {
mutable.setRowMeta(me.getRowMeta());
}
return mutable.freeze();
};
/**
* @template CT
* @param {recoil.structs.table.ColumnKey<CT>} column
* @return {CT}
*/
recoil.structs.table.TableRow.prototype.hasColumn = function(column) {
return this.cells_[column] !== undefined;
};
/**
* @return {!recoil.structs.table.MutableTableRow}
*/
recoil.structs.table.TableRow.prototype.unfreeze = function() {
return new recoil.structs.table.MutableTableRow(undefined, this);
};
/**
* A table row that can be changed. Use this to build up a row and then
* freeze it into an immutable TableRow
* @constructor
* @implements {recoil.structs.table.TableRowInterface}
* @param {number=} opt_position if the row is order specify this
* @param {recoil.structs.table.TableRow=} opt_immutable
*/
recoil.structs.table.MutableTableRow = function(opt_position, opt_immutable) {
if (opt_immutable) {
this.orig_ = opt_immutable.cells_;
this.pos_ = opt_position === undefined ? opt_immutable.pos() : opt_position;
}
else {
this.orig_ = {};
this.pos_ = opt_position === undefined ? undefined : opt_position;
}
this.changed_ = {};
};
/**
* Get the row meta data for this row
* @template CT
* @return {!Object}
*/
recoil.structs.table.MutableTableRow.prototype.getMeta = function() {
return this.getRowMeta();
};
/**
* @param {function(string,!recoil.structs.table.TableCell)} func
* if the function returns true the loop exits
*/
recoil.structs.table.MutableTableRow.prototype.forEachColumn = function(func) {
var metaCol = recoil.structs.table.ColumnKey.ROW_META.toString();
for (var col in this.changed_) {
if (metaCol !== col) {
if (func(col, this.changed_[col])) {
return;
}
}
}
for (col in this.orig_) {
if (metaCol !== col && !this.changed_[col]) {
if (func(col, this.orig_[col])) {
return;
}
}
}
};
/**
* @param {!recoil.structs.table.TableRowInterface} row
*/
recoil.structs.table.MutableTableRow.prototype.addColumns = function(row) {
var me = this;
row.forEachColumn(function(col, cell) {
me.changed_[col] = cell;
});
};
/**
* @param {?} that
* @return {boolean}
*/
recoil.structs.table.MutableTableRow.prototype.equals = function(that) {
if (!(that instanceof recoil.structs.table.MutableTableRow)) {
return false;
}
return recoil.util.object.isEqual(this.freeze(), that.freeze());
};
/**
* checks to see if the values are equal ignoring meta data
* @param {?} that
* @return {boolean}
*/
recoil.structs.table.MutableTableRow.prototype.valuesEqual = function(that) {
if (!(that instanceof recoil.structs.table.MutableTableRow)) {
return false;
}
return this.freeze().valuesEqual(that.freeze());
};
/**
* @return {number|undefined}
*/
recoil.structs.table.MutableTableRow.prototype.pos = function() {
return this.pos_;
};
/**
* @param {number|undefined} pos
*/
recoil.structs.table.MutableTableRow.prototype.setPos = function(pos) {
this.pos_ = pos;
};
/**
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @return {recoil.structs.table.TableCell<CT>}
*/
recoil.structs.table.MutableTableRow.prototype.getCell = function(column) {
if (this.changed_.hasOwnProperty(column)) {
return this.changed_[column];
}
var res = this.orig_[column];
if (res === undefined) {
return null;
}
return this.orig_[column];
};
/**
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @return {!Object}
*/
recoil.structs.table.MutableTableRow.prototype.getCellMeta = function(column) {
var res = this.getCell(column);
return res ? res.getMeta() : {};
};
/**
* @param {!Object} meta
*/
recoil.structs.table.MutableTableRow.prototype.setRowMeta = function(meta) {
var cell = this.getCell(recoil.structs.table.ColumnKey.ROW_META);
if (cell == null) {
cell = new recoil.structs.table.TableCell(undefined, {});
}
this.setCell(recoil.structs.table.ColumnKey.ROW_META, cell.setMeta(meta));
};
/**
* @param {!Object} meta
*/
recoil.structs.table.MutableTableRow.prototype.addRowMeta = function(meta) {
var newMeta = {};
recoil.util.object.addProps(newMeta, this.getRowMeta(), meta);
this.setRowMeta(newMeta);
};
/**
* @return {!Object}
*/
recoil.structs.table.MutableTableRow.prototype.getRowMeta = function() {
var cell = this.getCell(recoil.structs.table.ColumnKey.ROW_META);
return cell ? cell.getMeta() : {};
};
/**
* converts a mutable table row to immutable table row
* @return {!recoil.structs.table.TableRow}
*/
recoil.structs.table.MutableTableRow.prototype.freeze = function() {
return new recoil.structs.table.TableRow(this);
};
/**
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} column
* @return {CT}
*/
recoil.structs.table.MutableTableRow.prototype.get = function(column) {
var res = this.getCell(column);
return res === null ? null : res.getValue();
};
/**
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} columnKey
* @param {!recoil.structs.table.TableCell<CT>} val the data and meta data of the cell
*/
recoil.structs.table.MutableTableRow.prototype.setCell = function(columnKey, val) {
this.changed_[columnKey] = val;
};
/**
* a helper that transfers the given columns from the source row
* into this row
* @param {!Array<!recoil.structs.table.ColumnKey>} columnKeys
* @param {!recoil.structs.table.TableRowInterface} src
*/
recoil.structs.table.MutableTableRow.prototype.transfer = function(columnKeys, src) {
for (var i = 0; i < columnKeys.length; i++) {
var col = columnKeys[i];
this.set(col, src.get(col));
}
};
/**
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} columnKey
* @param {CT} val the data of the cell
* @param {Object=} opt_meta
*/
recoil.structs.table.MutableTableRow.prototype.set = function(columnKey, val, opt_meta) {
var old = this.getCell(columnKey);
if (old === null) {
old = new recoil.structs.table.TableCell(undefined);
}
if (opt_meta) {
this.setCell(columnKey, new recoil.structs.table.TableCell(
columnKey.castTo(val), opt_meta));
}
else {
this.setCell(columnKey, old.setValue(columnKey.castTo(val)));
}
};
/**
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} columnKey
* @param {!Object} val the data of the cell
*/
recoil.structs.table.MutableTableRow.prototype.setCellMeta = function(columnKey, val) {
var old = this.getCell(columnKey);
if (old === null) {
old = new recoil.structs.table.TableCell(undefined);
}
this.setCell(columnKey, old.setMeta(val));
};
/**
* @template CT
* @param {!recoil.structs.table.ColumnKey<CT>} columnKey
* @param {!Object} val the data of the cell
*/
recoil.structs.table.MutableTableRow.prototype.addCellMeta = function(columnKey, val) {
var old = this.getCell(columnKey);
if (old === null) {
old = new recoil.structs.table.TableCell(undefined);
}
this.setCell(columnKey, old.addMeta(val));
};
/**
*
* @template T
* @param {T} value
* @param {Object=} opt_meta
* @constructor
*/
recoil.structs.table.TableCell = function(value, opt_meta) {
this.value_ = value;
this.meta_ = opt_meta;
};
/**
* @return {!Object}
*/
recoil.structs.table.TableCell.prototype.getMeta = function() {
return !this.meta_ ? {} : goog.object.clone(this.meta_);
};
/**
* @return {T}
*/
recoil.structs.table.TableCell.prototype.getValue = function() {
return this.value_;
};
/**
* returns a new cell with the meta data set
* @param {!Object} meta
* @return {!recoil.structs.table.TableCell<T>}
*/
recoil.structs.table.TableCell.prototype.setMeta = function(meta) {
return new recoil.structs.table.TableCell(this.value_, meta);
};
/**
* returns a new cell with the given meta data merged into the existing meta data
* @param {!Object} meta
* @return {!recoil.structs.table.TableCell<T>}
*/
recoil.structs.table.TableCell.prototype.addMeta = function(meta) {
var newMeta = goog.object.clone(this.getMeta());
recoil.util.object.addProps(newMeta, meta);
return new recoil.structs.table.TableCell(this.value_, newMeta);
};
/**
* returns a new cell with the data set, keeps the metadata
* @param {T} value
* @return {!recoil.structs.table.TableCell<T>}
*/
recoil.structs.table.TableCell.prototype.setValue = function(value) {
return new recoil.structs.table.TableCell(value, this.meta_);
};<|fim▁end|> | };
|
<|file_name|>get.go<|end_file_name|><|fim▁begin|>// download the contents of a url
package main
import (
"fmt"
"io/ioutil"
"log"
"net/http"
)
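// fetchBody is an illustrative variant (editorial addition, not part of the original
// snippet): the same GET-and-read pattern used in main below, but returning errors to
// the caller instead of exiting through log.Fatal.
func fetchBody(url string) ([]byte, error) {
	res, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	return ioutil.ReadAll(res.Body)
}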
func main() {
res, err := http.Get("http://www.google.com/robots.txt")
if err != nil {
log.Fatal(err)<|fim▁hole|> }
robots, err := ioutil.ReadAll(res.Body)
res.Body.Close()
if err != nil {
log.Fatal(err)
}
fmt.Printf("%s", robots)
}<|fim▁end|> | |
<|file_name|>options.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package options provides the flags used for the controller manager.
//
package options
import (
"fmt"
"strings"
"time"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
"k8s.io/apimachinery/pkg/util/sets"
utilfeature "k8s.io/apiserver/pkg/util/feature"
"k8s.io/kubernetes/pkg/apis/componentconfig"
"k8s.io/kubernetes/pkg/client/leaderelection"
"k8s.io/kubernetes/pkg/controller/garbagecollector"
"k8s.io/kubernetes/pkg/master/ports"
// add the kubernetes feature gates
_ "k8s.io/kubernetes/pkg/features"
"github.com/cloudflare/cfssl/helpers"
"github.com/spf13/pflag"
)
// CMServer is the main context object for the controller manager.
type CMServer struct {
componentconfig.KubeControllerManagerConfiguration
Master string
Kubeconfig string
}
// NewCMServer creates a new CMServer with a default config.
func NewCMServer() *CMServer {
gcIgnoredResources := make([]componentconfig.GroupResource, 0, len(garbagecollector.DefaultIgnoredResources()))
for r := range garbagecollector.DefaultIgnoredResources() {
gcIgnoredResources = append(gcIgnoredResources, componentconfig.GroupResource{Group: r.Group, Resource: r.Resource})
}
s := CMServer{
KubeControllerManagerConfiguration: componentconfig.KubeControllerManagerConfiguration{
Controllers: []string{"*"},
Port: ports.ControllerManagerPort,
Address: "0.0.0.0",
ConcurrentEndpointSyncs: 5,
ConcurrentServiceSyncs: 1,
ConcurrentRCSyncs: 5,
ConcurrentRSSyncs: 5,
ConcurrentDaemonSetSyncs: 2,
ConcurrentJobSyncs: 5,
ConcurrentResourceQuotaSyncs: 5,
ConcurrentDeploymentSyncs: 5,
ConcurrentNamespaceSyncs: 5,
ConcurrentSATokenSyncs: 5,
LookupCacheSizeForRC: 4096,
LookupCacheSizeForRS: 4096,
LookupCacheSizeForDaemonSet: 1024,
ServiceSyncPeriod: metav1.Duration{Duration: 5 * time.Minute},
RouteReconciliationPeriod: metav1.Duration{Duration: 10 * time.Second},
ResourceQuotaSyncPeriod: metav1.Duration{Duration: 5 * time.Minute},
NamespaceSyncPeriod: metav1.Duration{Duration: 5 * time.Minute},
PVClaimBinderSyncPeriod: metav1.Duration{Duration: 15 * time.Second},
HorizontalPodAutoscalerSyncPeriod: metav1.Duration{Duration: 30 * time.Second},
HorizontalPodAutoscalerUpscaleForbiddenWindow: metav1.Duration{Duration: 3 * time.Minute},
HorizontalPodAutoscalerDownscaleForbiddenWindow: metav1.Duration{Duration: 5 * time.Minute},
DeploymentControllerSyncPeriod: metav1.Duration{Duration: 30 * time.Second},
MinResyncPeriod: metav1.Duration{Duration: 12 * time.Hour},
RegisterRetryCount: 10,
PodEvictionTimeout: metav1.Duration{Duration: 5 * time.Minute},
NodeMonitorGracePeriod: metav1.Duration{Duration: 40 * time.Second},
NodeStartupGracePeriod: metav1.Duration{Duration: 60 * time.Second},
NodeMonitorPeriod: metav1.Duration{Duration: 5 * time.Second},
ClusterName: "kubernetes",
NodeCIDRMaskSize: 24,
ConfigureCloudRoutes: true,
TerminatedPodGCThreshold: 12500,
VolumeConfiguration: componentconfig.VolumeConfiguration{
EnableHostPathProvisioning: false,
EnableDynamicProvisioning: true,
PersistentVolumeRecyclerConfiguration: componentconfig.PersistentVolumeRecyclerConfiguration{
MaximumRetry: 3,
MinimumTimeoutNFS: 300,
IncrementTimeoutNFS: 30,
MinimumTimeoutHostPath: 60,
IncrementTimeoutHostPath: 30,
},
FlexVolumePluginDir: "/usr/libexec/kubernetes/kubelet-plugins/volume/exec/",
},
ContentType: "application/vnd.kubernetes.protobuf",
KubeAPIQPS: 20.0,
KubeAPIBurst: 30,
LeaderElection: leaderelection.DefaultLeaderElectionConfiguration(),
ControllerStartInterval: metav1.Duration{Duration: 0 * time.Second},
EnableGarbageCollector: true,
ConcurrentGCSyncs: 20,
GCIgnoredResources: gcIgnoredResources,
ClusterSigningCertFile: "/etc/kubernetes/ca/ca.pem",
ClusterSigningKeyFile: "/etc/kubernetes/ca/ca.key",
ClusterSigningDuration: metav1.Duration{Duration: helpers.OneYear},
ReconcilerSyncLoopPeriod: metav1.Duration{Duration: 60 * time.Second},
EnableTaintManager: true,
HorizontalPodAutoscalerUseRESTClients: false,
},
}
s.LeaderElection.LeaderElect = true
return &s
}
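// exampleBindFlags is an illustrative sketch (editorial addition, not part of the
// controller manager): it shows how a CMServer's flags are typically bound to a pflag
// FlagSet, parsed, and validated. The controller names below are placeholders.
func exampleBindFlags(args []string) (*CMServer, error) {
	s := NewCMServer()
	fs := pflag.NewFlagSet("controller-manager-example", pflag.ContinueOnError)
	allControllers := []string{"endpoint", "replicaset"} // placeholder names
	disabledByDefault := []string{"bootstrapsigner"}     // placeholder names
	s.AddFlags(fs, allControllers, disabledByDefault)
	if err := fs.Parse(args); err != nil {
		return nil, err
	}
	if err := s.Validate(allControllers, disabledByDefault); err != nil {
		return nil, err
	}
	return s, nil
}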
// AddFlags adds flags for a specific CMServer to the specified FlagSet
func (s *CMServer) AddFlags(fs *pflag.FlagSet, allControllers []string, disabledByDefaultControllers []string) {
fs.StringSliceVar(&s.Controllers, "controllers", s.Controllers, fmt.Sprintf(""+
"A list of controllers to enable. '*' enables all on-by-default controllers, 'foo' enables the controller "+
"named 'foo', '-foo' disables the controller named 'foo'.\nAll controllers: %s\nDisabled-by-default controllers: %s",
strings.Join(allControllers, ", "), strings.Join(disabledByDefaultControllers, ", ")))
fs.Int32Var(&s.Port, "port", s.Port, "The port that the controller-manager's http service runs on")
fs.Var(componentconfig.IPVar{Val: &s.Address}, "address", "The IP address to serve on (set to 0.0.0.0 for all interfaces)")
fs.BoolVar(&s.UseServiceAccountCredentials, "use-service-account-credentials", s.UseServiceAccountCredentials, "If true, use individual service account credentials for each controller.")
fs.StringVar(&s.CloudProvider, "cloud-provider", s.CloudProvider, "The provider for cloud services. Empty string for no provider.")
fs.StringVar(&s.CloudConfigFile, "cloud-config", s.CloudConfigFile, "The path to the cloud provider configuration file. Empty string for no configuration file.")
fs.Int32Var(&s.ConcurrentEndpointSyncs, "concurrent-endpoint-syncs", s.ConcurrentEndpointSyncs, "The number of endpoint syncing operations that will be done concurrently. Larger number = faster endpoint updating, but more CPU (and network) load")
fs.Int32Var(&s.ConcurrentServiceSyncs, "concurrent-service-syncs", s.ConcurrentServiceSyncs, "The number of services that are allowed to sync concurrently. Larger number = more responsive service management, but more CPU (and network) load")
fs.Int32Var(&s.ConcurrentRCSyncs, "concurrent_rc_syncs", s.ConcurrentRCSyncs, "The number of replication controllers that are allowed to sync concurrently. Larger number = more responsive replica management, but more CPU (and network) load")
fs.Int32Var(&s.ConcurrentRSSyncs, "concurrent-replicaset-syncs", s.ConcurrentRSSyncs, "The number of replica sets that are allowed to sync concurrently. Larger number = more responsive replica management, but more CPU (and network) load")
fs.Int32Var(&s.ConcurrentResourceQuotaSyncs, "concurrent-resource-quota-syncs", s.ConcurrentResourceQuotaSyncs, "The number of resource quotas that are allowed to sync concurrently. Larger number = more responsive quota management, but more CPU (and network) load")
fs.Int32Var(&s.ConcurrentDeploymentSyncs, "concurrent-deployment-syncs", s.ConcurrentDeploymentSyncs, "The number of deployment objects that are allowed to sync concurrently. Larger number = more responsive deployments, but more CPU (and network) load")
fs.Int32Var(&s.ConcurrentNamespaceSyncs, "concurrent-namespace-syncs", s.ConcurrentNamespaceSyncs, "The number of namespace objects that are allowed to sync concurrently. Larger number = more responsive namespace termination, but more CPU (and network) load")
fs.Int32Var(&s.ConcurrentSATokenSyncs, "concurrent-serviceaccount-token-syncs", s.ConcurrentSATokenSyncs, "The number of service account token objects that are allowed to sync concurrently. Larger number = more responsive token generation, but more CPU (and network) load")
// TODO(#43388): Remove the following flag 6 months after v1.6.0 is released.
fs.Int32Var(&s.LookupCacheSizeForRC, "replication-controller-lookup-cache-size", s.LookupCacheSizeForRC, "This flag is deprecated and will be removed in future releases. ReplicationController no longer requires a lookup cache.")
fs.MarkDeprecated("replication-controller-lookup-cache-size", "This flag is deprecated and will be removed in future releases. ReplicationController no longer requires a lookup cache.")
// TODO(#43388): Remove the following flag 6 months after v1.6.0 is released.
fs.Int32Var(&s.LookupCacheSizeForRS, "replicaset-lookup-cache-size", s.LookupCacheSizeForRS, "This flag is deprecated and will be removed in future releases. ReplicaSet no longer requires a lookup cache.")
fs.MarkDeprecated("replicaset-lookup-cache-size", "This flag is deprecated and will be removed in future releases. ReplicaSet no longer requires a lookup cache.")
// TODO(#43388): Remove the following flag 6 months after v1.6.0 is released.
fs.Int32Var(&s.LookupCacheSizeForDaemonSet, "daemonset-lookup-cache-size", s.LookupCacheSizeForDaemonSet, "This flag is deprecated and will be removed in future releases. DaemonSet no longer requires a lookup cache.")
fs.MarkDeprecated("daemonset-lookup-cache-size", "This flag is deprecated and will be removed in future releases. DaemonSet no longer requires a lookup cache.")
fs.DurationVar(&s.ServiceSyncPeriod.Duration, "service-sync-period", s.ServiceSyncPeriod.Duration, "The period for syncing services with their external load balancers")
fs.DurationVar(&s.NodeSyncPeriod.Duration, "node-sync-period", 0, ""+
"This flag is deprecated and will be removed in future releases. See node-monitor-period for Node health checking or "+
"route-reconciliation-period for cloud provider's route configuration settings.")
fs.MarkDeprecated("node-sync-period", "This flag is currently no-op and will be deleted.")
fs.DurationVar(&s.RouteReconciliationPeriod.Duration, "route-reconciliation-period", s.RouteReconciliationPeriod.Duration, "The period for reconciling routes created for Nodes by cloud provider.")
fs.DurationVar(&s.ResourceQuotaSyncPeriod.Duration, "resource-quota-sync-period", s.ResourceQuotaSyncPeriod.Duration, "The period for syncing quota usage status in the system")<|fim▁hole|> fs.StringVar(&s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.PodTemplateFilePathNFS, "pv-recycler-pod-template-filepath-nfs", s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.PodTemplateFilePathNFS, "The file path to a pod definition used as a template for NFS persistent volume recycling")
fs.Int32Var(&s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.MinimumTimeoutNFS, "pv-recycler-minimum-timeout-nfs", s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.MinimumTimeoutNFS, "The minimum ActiveDeadlineSeconds to use for an NFS Recycler pod")
fs.Int32Var(&s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.IncrementTimeoutNFS, "pv-recycler-increment-timeout-nfs", s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.IncrementTimeoutNFS, "the increment of time added per Gi to ActiveDeadlineSeconds for an NFS scrubber pod")
fs.StringVar(&s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.PodTemplateFilePathHostPath, "pv-recycler-pod-template-filepath-hostpath", s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.PodTemplateFilePathHostPath, "The file path to a pod definition used as a template for HostPath persistent volume recycling. This is for development and testing only and will not work in a multi-node cluster.")
fs.Int32Var(&s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.MinimumTimeoutHostPath, "pv-recycler-minimum-timeout-hostpath", s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.MinimumTimeoutHostPath, "The minimum ActiveDeadlineSeconds to use for a HostPath Recycler pod. This is for development and testing only and will not work in a multi-node cluster.")
fs.Int32Var(&s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.IncrementTimeoutHostPath, "pv-recycler-timeout-increment-hostpath", s.VolumeConfiguration.PersistentVolumeRecyclerConfiguration.IncrementTimeoutHostPath, "the increment of time added per Gi to ActiveDeadlineSeconds for a HostPath scrubber pod. This is for development and testing only and will not work in a multi-node cluster.")
fs.BoolVar(&s.VolumeConfiguration.EnableHostPathProvisioning, "enable-hostpath-provisioner", s.VolumeConfiguration.EnableHostPathProvisioning, "Enable HostPath PV provisioning when running without a cloud provider. This allows testing and development of provisioning features. HostPath provisioning is not supported in any way, won't work in a multi-node cluster, and should not be used for anything other than testing or development.")
fs.BoolVar(&s.VolumeConfiguration.EnableDynamicProvisioning, "enable-dynamic-provisioning", s.VolumeConfiguration.EnableDynamicProvisioning, "Enable dynamic provisioning for environments that support it.")
fs.StringVar(&s.VolumeConfiguration.FlexVolumePluginDir, "flex-volume-plugin-dir", s.VolumeConfiguration.FlexVolumePluginDir, "Full path of the directory in which the flex volume plugin should search for additional third party volume plugins.")
fs.Int32Var(&s.TerminatedPodGCThreshold, "terminated-pod-gc-threshold", s.TerminatedPodGCThreshold, "Number of terminated pods that can exist before the terminated pod garbage collector starts deleting terminated pods. If <= 0, the terminated pod garbage collector is disabled.")
fs.DurationVar(&s.HorizontalPodAutoscalerSyncPeriod.Duration, "horizontal-pod-autoscaler-sync-period", s.HorizontalPodAutoscalerSyncPeriod.Duration, "The period for syncing the number of pods in horizontal pod autoscaler.")
fs.DurationVar(&s.HorizontalPodAutoscalerUpscaleForbiddenWindow.Duration, "horizontal-pod-autoscaler-upscale-delay", s.HorizontalPodAutoscalerUpscaleForbiddenWindow.Duration, "The period since last upscale, before another upscale can be performed in horizontal pod autoscaler.")
fs.DurationVar(&s.HorizontalPodAutoscalerDownscaleForbiddenWindow.Duration, "horizontal-pod-autoscaler-downscale-delay", s.HorizontalPodAutoscalerDownscaleForbiddenWindow.Duration, "The period since last downscale, before another downscale can be performed in horizontal pod autoscaler.")
fs.DurationVar(&s.DeploymentControllerSyncPeriod.Duration, "deployment-controller-sync-period", s.DeploymentControllerSyncPeriod.Duration, "Period for syncing the deployments.")
fs.DurationVar(&s.PodEvictionTimeout.Duration, "pod-eviction-timeout", s.PodEvictionTimeout.Duration, "The grace period for deleting pods on failed nodes.")
fs.Float32Var(&s.DeletingPodsQps, "deleting-pods-qps", 0.1, "Number of nodes per second on which pods are deleted in case of node failure.")
fs.MarkDeprecated("deleting-pods-qps", "This flag is currently no-op and will be deleted.")
fs.Int32Var(&s.DeletingPodsBurst, "deleting-pods-burst", 0, "Number of nodes on which pods are bursty deleted in case of node failure. For more details look into RateLimiter.")
fs.MarkDeprecated("deleting-pods-burst", "This flag is currently no-op and will be deleted.")
fs.Int32Var(&s.RegisterRetryCount, "register-retry-count", s.RegisterRetryCount, ""+
"The number of retries for initial node registration. Retry interval equals node-sync-period.")
fs.MarkDeprecated("register-retry-count", "This flag is currently no-op and will be deleted.")
fs.DurationVar(&s.NodeMonitorGracePeriod.Duration, "node-monitor-grace-period", s.NodeMonitorGracePeriod.Duration,
"Amount of time which we allow running Node to be unresponsive before marking it unhealthy. "+
"Must be N times more than kubelet's nodeStatusUpdateFrequency, "+
"where N means number of retries allowed for kubelet to post node status.")
fs.DurationVar(&s.NodeStartupGracePeriod.Duration, "node-startup-grace-period", s.NodeStartupGracePeriod.Duration,
"Amount of time which we allow starting Node to be unresponsive before marking it unhealthy.")
fs.DurationVar(&s.NodeMonitorPeriod.Duration, "node-monitor-period", s.NodeMonitorPeriod.Duration,
"The period for syncing NodeStatus in NodeController.")
fs.StringVar(&s.ServiceAccountKeyFile, "service-account-private-key-file", s.ServiceAccountKeyFile, "Filename containing a PEM-encoded private RSA or ECDSA key used to sign service account tokens.")
fs.StringVar(&s.ClusterSigningCertFile, "cluster-signing-cert-file", s.ClusterSigningCertFile, "Filename containing a PEM-encoded X509 CA certificate used to issue cluster-scoped certificates")
fs.StringVar(&s.ClusterSigningKeyFile, "cluster-signing-key-file", s.ClusterSigningKeyFile, "Filename containing a PEM-encoded RSA or ECDSA private key used to sign cluster-scoped certificates")
fs.DurationVar(&s.ClusterSigningDuration.Duration, "experimental-cluster-signing-duration", s.ClusterSigningDuration.Duration, "The length of duration signed certificates will be given.")
fs.StringVar(&s.ApproveAllKubeletCSRsForGroup, "insecure-experimental-approve-all-kubelet-csrs-for-group", s.ApproveAllKubeletCSRsForGroup, "The group for which the controller-manager will auto approve all CSRs for kubelet client certificates.")
fs.BoolVar(&s.EnableProfiling, "profiling", true, "Enable profiling via web interface host:port/debug/pprof/")
fs.BoolVar(&s.EnableContentionProfiling, "contention-profiling", false, "Enable lock contention profiling, if profiling is enabled")
fs.StringVar(&s.ClusterName, "cluster-name", s.ClusterName, "The instance prefix for the cluster")
fs.StringVar(&s.ClusterCIDR, "cluster-cidr", s.ClusterCIDR, "CIDR Range for Pods in cluster.")
fs.StringVar(&s.ServiceCIDR, "service-cluster-ip-range", s.ServiceCIDR, "CIDR Range for Services in cluster.")
fs.Int32Var(&s.NodeCIDRMaskSize, "node-cidr-mask-size", s.NodeCIDRMaskSize, "Mask size for node cidr in cluster.")
fs.BoolVar(&s.AllocateNodeCIDRs, "allocate-node-cidrs", false,
"Should CIDRs for Pods be allocated and set on the cloud provider.")
fs.StringVar(&s.CIDRAllocatorType, "cidr-allocator-type", "RangeAllocator",
"Type of CIDR allocator to use")
fs.BoolVar(&s.ConfigureCloudRoutes, "configure-cloud-routes", true, "Should CIDRs allocated by allocate-node-cidrs be configured on the cloud provider.")
fs.StringVar(&s.Master, "master", s.Master, "The address of the Kubernetes API server (overrides any value in kubeconfig)")
fs.StringVar(&s.Kubeconfig, "kubeconfig", s.Kubeconfig, "Path to kubeconfig file with authorization and master location information.")
fs.StringVar(&s.RootCAFile, "root-ca-file", s.RootCAFile, "If set, this root certificate authority will be included in service account's token secret. This must be a valid PEM-encoded CA bundle.")
fs.StringVar(&s.ContentType, "kube-api-content-type", s.ContentType, "Content type of requests sent to apiserver.")
fs.Float32Var(&s.KubeAPIQPS, "kube-api-qps", s.KubeAPIQPS, "QPS to use while talking with kubernetes apiserver")
fs.Int32Var(&s.KubeAPIBurst, "kube-api-burst", s.KubeAPIBurst, "Burst to use while talking with kubernetes apiserver")
fs.DurationVar(&s.ControllerStartInterval.Duration, "controller-start-interval", s.ControllerStartInterval.Duration, "Interval between starting controller managers.")
fs.BoolVar(&s.EnableGarbageCollector, "enable-garbage-collector", s.EnableGarbageCollector, "Enables the generic garbage collector. MUST be synced with the corresponding flag of the kube-apiserver.")
fs.Int32Var(&s.ConcurrentGCSyncs, "concurrent-gc-syncs", s.ConcurrentGCSyncs, "The number of garbage collector workers that are allowed to sync concurrently.")
fs.Float32Var(&s.NodeEvictionRate, "node-eviction-rate", 0.1, "Number of nodes per second on which pods are deleted in case of node failure when a zone is healthy (see --unhealthy-zone-threshold for definition of healthy/unhealthy). Zone refers to entire cluster in non-multizone clusters.")
fs.Float32Var(&s.SecondaryNodeEvictionRate, "secondary-node-eviction-rate", 0.01, "Number of nodes per second on which pods are deleted in case of node failure when a zone is unhealthy (see --unhealthy-zone-threshold for definition of healthy/unhealthy). Zone refers to entire cluster in non-multizone clusters. This value is implicitly overridden to 0 if the cluster size is smaller than --large-cluster-size-threshold.")
fs.Int32Var(&s.LargeClusterSizeThreshold, "large-cluster-size-threshold", 50, "Number of nodes from which NodeController treats the cluster as large for the eviction logic purposes. --secondary-node-eviction-rate is implicitly overridden to 0 for clusters this size or smaller.")
fs.Float32Var(&s.UnhealthyZoneThreshold, "unhealthy-zone-threshold", 0.55, "Fraction of Nodes in a zone which needs to be not Ready (minimum 3) for zone to be treated as unhealthy. ")
fs.BoolVar(&s.DisableAttachDetachReconcilerSync, "disable-attach-detach-reconcile-sync", false, "Disable volume attach detach reconciler sync. Disabling this may cause volumes to be mismatched with pods. Use wisely.")
fs.DurationVar(&s.ReconcilerSyncLoopPeriod.Duration, "attach-detach-reconcile-sync-period", s.ReconcilerSyncLoopPeriod.Duration, "The reconciler sync wait time between volume attach detach. This duration must be larger than one second, and increasing this value from the default may allow for volumes to be mismatched with pods.")
fs.BoolVar(&s.EnableTaintManager, "enable-taint-manager", s.EnableTaintManager, "WARNING: Beta feature. If set to true enables NoExecute Taints and will evict all not-tolerating Pod running on Nodes tainted with this kind of Taints.")
fs.BoolVar(&s.HorizontalPodAutoscalerUseRESTClients, "horizontal-pod-autoscaler-use-rest-clients", s.HorizontalPodAutoscalerUseRESTClients, "WARNING: alpha feature. If set to true, causes the horizontal pod autoscaler controller to use REST clients through the kube-aggregator, instead of using the legacy metrics client through the API server proxy. This is required for custom metrics support in the horizontal pod autoscaler.")
leaderelection.BindFlags(&s.LeaderElection, fs)
utilfeature.DefaultFeatureGate.AddFlag(fs)
}
// Validate is used to validate the options and config before launching the controller manager
func (s *CMServer) Validate(allControllers []string, disabledByDefaultControllers []string) error {
var errs []error
allControllersSet := sets.NewString(allControllers...)
for _, controller := range s.Controllers {
if controller == "*" {
continue
}
if strings.HasPrefix(controller, "-") {
controller = controller[1:]
}
if !allControllersSet.Has(controller) {
errs = append(errs, fmt.Errorf("%q is not in the list of known controllers", controller))
}
}
return utilerrors.NewAggregate(errs)
}<|fim▁end|> | fs.DurationVar(&s.NamespaceSyncPeriod.Duration, "namespace-sync-period", s.NamespaceSyncPeriod.Duration, "The period for syncing namespace life-cycle updates")
fs.DurationVar(&s.PVClaimBinderSyncPeriod.Duration, "pvclaimbinder-sync-period", s.PVClaimBinderSyncPeriod.Duration, "The period for syncing persistent volumes and persistent volume claims")
fs.DurationVar(&s.MinResyncPeriod.Duration, "min-resync-period", s.MinResyncPeriod.Duration, "The resync period in reflectors will be random between MinResyncPeriod and 2*MinResyncPeriod") |
<|file_name|>append.js<|end_file_name|><|fim▁begin|>// A DOM operation helper. Append an Element to a parent
export default function append(parent, ...children) {
// Always select the first item of a list, similarly to jQuery
if (Array.isArray(parent)) {
parent = parent[0]
}<|fim▁hole|> children.forEach(parent.appendChild, parent)
return parent
}<|fim▁end|> | |
<|file_name|>NicknameActions.ts<|end_file_name|><|fim▁begin|>import { NICKNAME_SET } from '../constants'
export interface NicknameSetPayload {
nickname: string
userId: string
}
export interface NicknameSetAction {
type: 'NICKNAME_SET'<|fim▁hole|>
export function setNickname(payload: NicknameSetPayload): NicknameSetAction {
return {
type: NICKNAME_SET,
payload,
}
}
export type NicknameActions = NicknameSetAction<|fim▁end|> | payload: NicknameSetPayload
} |
<|file_name|>comments.js<|end_file_name|><|fim▁begin|>import {COMMENT_REPLY} from './../bundles/comment/actions'
import { COMMENTS_RECEIVE } from './../bundles/thread/actions'
function comments(state = {}, action) {
switch (action.type) {
case COMMENTS_RECEIVE:
return {
...state,
...action.list
}<|fim▁hole|> // [action.id]: {
// ...state[action.id],
// _reply: !state[action.id]['_reply']
// }
// }
default:
return state
}
}
export default comments<|fim▁end|> | // case COMMENT_REPLY:
// return {
// ...state, |
<|file_name|>default.py<|end_file_name|><|fim▁begin|>"""
URLResolver Addon for Kodi
Copyright (C) 2016 t0mm0, tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
from urlresolver.lib import kodi
from urlresolver.lib import log_utils
from urlresolver.lib import cache
from urlresolver.lib.url_dispatcher import URL_Dispatcher
url_dispatcher = URL_Dispatcher()
def __enum(**enums):
return type('Enum', (), enums)
MODES = __enum(AUTH_RD='auth_rd', RESET_RD='reset_rd', RESET_CACHE='reset_cache')
@url_dispatcher.register(MODES.AUTH_RD)
def auth_rd():
kodi.close_all()
kodi.sleep(500) # sleep or authorize won't work for some reason
from urlresolver.plugins import realdebrid
if realdebrid.RealDebridResolver().authorize_resolver():
kodi.notify(msg=kodi.i18n('rd_authorized'), duration=5000)
@url_dispatcher.register(MODES.RESET_RD)
def reset_rd():
kodi.close_all()
kodi.sleep(500) # sleep or reset won't work for some reason
from urlresolver.plugins import realdebrid
rd = realdebrid.RealDebridResolver()
rd.reset_authorization()
kodi.notify(msg=kodi.i18n('rd_auth_reset'), duration=5000)
@url_dispatcher.register(MODES.RESET_CACHE)
def reset_cache():
if cache.reset_cache():
kodi.notify(msg=kodi.i18n('cache_reset'))
else:
kodi.notify(msg=kodi.i18n('cache_reset_failed'))
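# Illustrative sketch (editorial addition, not part of the add-on): additional modes
# are registered with the dispatcher in the same way as the handlers above. The mode
# name and log message here are hypothetical placeholders.
@url_dispatcher.register('example_mode')
def example_mode():
    log_utils.log('example_mode invoked')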
def main(argv=None):
if sys.argv: argv = sys.argv
queries = kodi.parse_query(sys.argv[2])
log_utils.log('Version: |%s| Queries: |%s|' % (kodi.get_version(), queries))
log_utils.log('Args: |%s|' % (argv))
# don't process params that don't match our url exactly. (e.g. plugin://plugin.video.1channel/extrafanart)
plugin_url = 'plugin://%s/' % (kodi.get_id())
if argv[0] != plugin_url:
return
mode = queries.get('mode', None)
url_dispatcher.dispatch(mode, queries)
if __name__ == '__main__':<|fim▁hole|><|fim▁end|> | sys.exit(main()) |
<|file_name|>token.go<|end_file_name|><|fim▁begin|>package token
import (
"fmt"
)
type Token struct {
Type
Lit []byte
Pos
}
<|fim▁hole|> INVALID Type = iota
EOF
)
type Pos struct {
Offset int
Line int
Column int
}
func (this Pos) String() string {
return fmt.Sprintf("Pos(offset=%d, line=%d, column=%d)", this.Offset, this.Line, this.Column)
}
type TokenMap struct {
typeMap []string
idMap map[string]Type
}
func (this TokenMap) Id(tok Type) string {
if int(tok) < len(this.typeMap) {
return this.typeMap[tok]
}
return "unknown"
}
func (this TokenMap) Type(tok string) Type {
if typ, exist := this.idMap[tok]; exist {
return typ
}
return INVALID
}
func (this TokenMap) TokenString(tok *Token) string {
//TODO: refactor to print pos & token string properly
return fmt.Sprintf("%s(%d,%s)", this.Id(tok.Type), tok.Type, tok.Lit)
}
func (this TokenMap) StringType(typ Type) string {
return fmt.Sprintf("%s(%d)", this.Id(typ), typ)
}
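// describeToken is an illustrative helper (editorial addition, not part of the
// generated token package): it combines the package-level TokMap defined below with a
// token's embedded position for debug output.
func describeToken(tok *Token) string {
	if tok == nil {
		return "nil token"
	}
	return TokMap.TokenString(tok) + " at " + tok.Pos.String()
}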
var TokMap = TokenMap{
typeMap: []string{
"INVALID",
"$",
";",
"id",
"=",
"+",
"-",
"*",
"/",
"(",
")",
"int",
"float",
"string",
"${date:",
"date",
"}",
"${env:",
},
idMap: map[string]Type {
"INVALID": 0,
"$": 1,
";": 2,
"id": 3,
"=": 4,
"+": 5,
"-": 6,
"*": 7,
"/": 8,
"(": 9,
")": 10,
"int": 11,
"float": 12,
"string": 13,
"${date:": 14,
"date": 15,
"}": 16,
"${env:": 17,
},
}<|fim▁end|> | type Type int
const( |
<|file_name|>ir_actions.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, models
<|fim▁hole|>
@api.multi
def read(self, fields=None, context=None, load='_classic_read'):
actions = super(IRActionsWindow, self).read(fields=fields, load=load)
for action in actions:
if action.get('res_model', '') == 'res.partner':
# By default, only show standalone contact
action_context = action.get('context', '{}') or '{}'
if 'search_show_all_positions' not in action_context:
action['context'] = action_context.replace(
'{',
("{'search_show_all_positions': "
"{'is_set': True, 'set_value': False},"),
1)
return actions<|fim▁end|> |
class IRActionsWindow(models.Model):
_inherit = 'ir.actions.act_window' |
<|file_name|>allskymap.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
"""
AllSkyMap is a subclass of Basemap, specialized for handling common plotting
tasks for celestial data.
It is essentially equivalent to using Basemap with full-sphere projections
(e.g., 'hammer' or 'moll') and the `celestial` keyword set to `True`, but
it adds a few new methods:
* label_meridians for, well, labeling meridians with their longitude values;
* geodesic, a replacement for Basemap.drawgreatcircle, that can correctly
handle geodesics that cross the limb of the map, and providing the user
easy control over clipping (which affects thick lines at or near the limb);
* tissot, which overrides Basemap.tissot, correctly handling geodesics that
cross the limb of the map.
Created Jan 2011 by Tom Loredo, based on Jeff Whitaker's code in Basemap's
__init__.py module.
"""
import numpy as np
from numpy import *
import matplotlib.pyplot as pl
from matplotlib.pyplot import *
from mpl_toolkits.basemap import Basemap
import pyproj
from pyproj import Geod
__all__ = ['AllSkyMap']
def angle_symbol(angle, round_to=1.0):
"""
Return a string representing an angle, rounded and with a degree symbol.
This is adapted from code in mpl's projections.geo module.
"""
value = np.round(angle / round_to) * round_to
if pl.rcParams['text.usetex'] and not pl.rcParams['text.latex.unicode']:
return r'$%0.0f^\circ$' % value
else:
return '%0.0f\N{DEGREE SIGN}' % value
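def _angle_symbol_demo():
    """Illustrative helper (editorial addition, not in the original module): build
    degree labels for 30-degree meridian ticks using angle_symbol."""
    return [angle_symbol(lon, round_to=1.0) for lon in range(-180, 181, 30)]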
class AllSkyMap(Basemap):
"""
AllSkyMap is a subclass of Basemap, specialized for handling common plotting
tasks for celestial data.
It is essentially equivalent to using Basemap with full-sphere projections
(e.g., 'hammer' or 'moll') and the `celestial` keyword set to `True`, but
it adds a few new methods:
* label_meridians for, well, labeling meridians with their longitude values;
* geodesic, a replacement for Basemap.drawgreatcircle, that can correctly
handle geodesics that cross the limb of the map, and providing the user
easy control over clipping (which affects thick lines at or near the
limb);
* tissot, which overrides Basemap.tissot, correctly handling geodesics that
cross the limb of the map.
"""
# Longitudes corresponding to east and west edges, reflecting the
# convention that 180 deg is the eastern edge, according to basemap's
# underlying projections:
east_lon = 180.
west_lon = 180.+1.e-10
def __init__(self,
projection='hammer',
lat_0=0., lon_0=0.,
suppress_ticks=True,
boundinglat=None,
fix_aspect=True,
anchor=str('C'),
ax=None):
if projection != 'hammer' and projection != 'moll':
raise ValueError('Only hammer and moll projections supported!')
# Use Basemap's init, enforcing the values of many parameters that
# aren't used or whose Basemap defaults would not be altered for all-sky
# celestial maps.
Basemap.__init__(self, llcrnrlon=None, llcrnrlat=None,
urcrnrlon=None, urcrnrlat=None,
llcrnrx=None, llcrnry=None,
urcrnrx=None, urcrnry=None,
width=None, height=None,
projection=projection, resolution=None,
area_thresh=None, rsphere=1.,
lat_ts=None,
lat_1=None, lat_2=None,
lat_0=lat_0, lon_0=lon_0,
suppress_ticks=suppress_ticks,
satellite_height=1.,
boundinglat=None,
fix_aspect=True,
anchor=anchor,
celestial=True,
ax=ax)
# Keep a local ref to lon_0 for hemisphere checking.
self._lon_0 = self.projparams['lon_0']
self._limb = None
def drawmapboundary(self,color='k',linewidth=1.0,fill_color=None,\
zorder=None,ax=None):
"""
draw boundary around map projection region, optionally
filling interior of region.
.. tabularcolumns:: |l|L|
============== ====================================================
Keyword Description
============== ====================================================
linewidth line width for boundary (default 1.)
color color of boundary line (default black)
fill_color fill the map region background with this
color (default is no fill or fill with axis
background color).
zorder sets the zorder for filling map background
(default 0).
ax axes instance to use
(default None, use default axes instance).
============== ====================================================
returns matplotlib.collections.PatchCollection representing map boundary.
"""
# Just call the base class version, but keep a copy of the limb
# polygon for clipping.
self._limb = Basemap.drawmapboundary(self, color=color,
linewidth=linewidth, fill_color=fill_color, zorder=zorder, ax=ax)
return self._limb
def label_meridians(self, lons, fontsize=10, valign='bottom', vnudge=0,
halign='center', hnudge=0, color='black'):
"""
Label meridians with their longitude values in degrees.
This labels a meridian at negative longitude lon with the value 360+lon
(e.g., -150 is labeled 210);
for maps in celestial orientation, this means meridians to the right
of the central meridian are labeled from 360 to 180 (left to right).
`vnudge` and `hnudge` specify amounts in degrees to nudge the labels
from their default placements, vertically and horizontally. These
values obey the map orientation, so to nudge to the right, use a
negative `hnudge` value.
"""
# Run through (lon, lat) pairs, with lat=0 in each pair.
lats = len(lons)*[0.]
for lon,lat in zip(lons, lats):
x, y = self(lon+hnudge, lat+vnudge)
if lon < 0:
lon_lbl = 360 + lon
else:
lon_lbl = lon
pl.text(x, y, angle_symbol(lon_lbl), fontsize=fontsize,
verticalalignment=valign,
horizontalalignment=halign,color=color)
def east_hem(self, lon):
"""
Return True if lon is in the eastern hemisphere of the map wrt lon_0.
"""
if (lon-self._lon_0) % 360. <= self.east_lon:
return True
else:
return False
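    # Illustrative check (assumed values): with lon_0 = 0, east_hem(90) is
    # True while east_hem(-90) is False, since (-90) % 360 = 270 > 180.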
def geodesic(self, lon1, lat1, lon2, lat2, del_s=.01, clip=True, **kwargs):
"""
Plot a geodesic curve from (lon1, lat1) to (lon2, lat2), with
points separated by arc length del_s. Return a list of Line2D
instances for the curves comprising the geodesic. If the geodesic does
not cross the map limb, there will be only a single curve; if it
crosses the limb, there will be two curves.
"""
# TODO: Perhaps return a single Line2D instance when there is only a
# single segment, and a list of segments only when there are two segs?
# TODO: Check the units of del_s.
# This is based on Basemap.drawgreatcircle (which draws an *arc* of a
# great circle), but addresses a limitation of that method, supporting
# geodesics that cross the map boundary by breaking them into two
# segments, one in the eastern hemisphere and the other in the western.
gc = pyproj.Geod(a=self.rmajor,b=self.rminor)
az12,az21,dist = gc.inv(lon1,lat1,lon2,lat2)
npoints = int((dist+0.5*del_s)/del_s)
# Calculate lon & lat for points on the arc.
lonlats = gc.npts(lon1,lat1,lon2,lat2,npoints)
lons = [lon1]; lats = [lat1]
for lon, lat in lonlats:
lons.append(lon)
lats.append(lat)
lons.append(lon2); lats.append(lat2)
# Break the arc into segments as needed, when there is a longitudinal
# hemisphere crossing.
segs = []
seg_lons, seg_lats = [lon1], [lat1]
cur_hem = self.east_hem(lon1)
for lon, lat in zip(lons[1:], lats[1:]):
if self.east_hem(lon) == cur_hem:
seg_lons.append(lon)
seg_lats.append(lat)
else:
# We should interpolate a new pt at the boundary, but in
# the meantime just rely on the step size being small.
segs.append( (seg_lons, seg_lats) )
seg_lons, seg_lats = [lon], [lat]
cur_hem = not cur_hem
segs.append( (seg_lons, seg_lats) )
# Plot each segment; return a list of the mpl lines.
lines = []
for lons, lats in segs:
x, y = self(lons, lats)
if clip and self._limb:
line = plot(x, y, clip_path=self._limb, **kwargs)[0]
else:
line = plot(x, y, **kwargs)[0]
lines.append(line)
# If there are multiple segments and no color args, reconcile the
# colors, which mpl will have autoset to different values.
# *** Does this screw up mpl's color set sequence for later lines?
if 'c' not in kwargs or 'color' in kwargs:
if len(lines) > 1:
c1 = lines[0].get_color()
for line in lines[1:]:
line.set_color(c1)
return lines
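    # Illustrative note (not part of the original source): a limb-crossing
    # geodesic comes back as two Line2D segments, so callers that restyle the
    # result afterwards should iterate over the returned list, e.g.
    #
    #     for seg in m.geodesic(120, 30, 240, -60, c='b', lw=2):
    #         seg.set_alpha(0.5)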
<|fim▁hole|> def tissot(self,lon_0,lat_0,radius_deg,npts,ax=None,**kwargs):
"""
Draw a polygon centered at ``lon_0,lat_0``. The polygon
approximates a circle on the surface of the earth with radius
``radius_deg`` degrees latitude along longitude ``lon_0``,
made up of ``npts`` vertices.
The polygon represents a Tissot's indicatrix
(http://en.wikipedia.org/wiki/Tissot's_Indicatrix),
which when drawn on a map shows the distortion inherent in the map
projection. Tissots can be used to display azimuthally symmetric
directional uncertainties ("error circles").
Extra keyword ``ax`` can be used to override the default axis instance.
Other \**kwargs passed on to matplotlib.patches.Polygon.
returns a list of matplotlib.patches.Polygon objects, with two polygons
when the tissot crosses the limb, and just one polygon otherwise.
"""
# TODO: Just return the polygon (not a list) when there is only one
# polygon? Or stick with the list for consistency?
# This is based on Basemap.tissot, but addresses a limitation of that
# method by handling tissots that cross the limb of the map by finding
# separate polygons in the eastern and western hemispheres comprising
# the tissot.
ax = kwargs.pop('ax', None) or self._check_ax()
g = pyproj.Geod(a=self.rmajor,b=self.rminor)
az12,az21,dist = g.inv(lon_0,lat_0,lon_0,lat_0+radius_deg)
start_hem = self.east_hem(lon_0)
segs1 = [self(lon_0,lat_0+radius_deg)]
over, segs2 = [], []
delaz = 360./npts
az = az12
last_lon = lon_0
# Note adjacent and opposite edge longitudes, in case the tissot
# runs over the edge.
if start_hem: # eastern case
adj_lon = self.east_lon
opp_lon = self.west_lon
else:
adj_lon = self.west_lon
opp_lon = self.east_lon
for n in range(npts):
az = az+delaz
# skip segments along equator (Geod can't handle equatorial arcs)
if np.allclose(0.,lat_0) and (np.allclose(90.,az) or np.allclose(270.,az)):
continue
else:
lon, lat, az21 = g.fwd(lon_0, lat_0, az, dist)
# If in the starting hemisphere, add to 1st polygon seg list.
if self.east_hem(lon) == start_hem:
x, y = self(lon, lat)
# Add segment if it is in the map projection region.
if x < 1.e20 and y < 1.e20:
segs1.append( (x, y) )
last_lon = lon
# Otherwise, we cross hemispheres.
else:
# Trace the edge of each hemisphere.
x, y = self(adj_lon, lat)
if x < 1.e20 and y < 1.e20:
segs1.append( (x, y) )
# We presume if adj projection is okay, opposite is.
segs2.append( self(opp_lon, lat) )
# Also store the overlap in the opposite hemisphere.
x, y = self(lon, lat)
if x < 1.e20 and y < 1.e20:
over.append( (x, y) )
last_lon = lon
poly1 = Polygon(segs1, **kwargs)
ax.add_patch(poly1)
if segs2:
over.reverse()
segs2.extend(over)
poly2 = Polygon(segs2, **kwargs)
ax.add_patch(poly2)
return [poly1, poly2]
else:
return [poly1]
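    # Illustrative note (not part of the original source): a tissot that
    # crosses the limb yields two Polygon patches, so restyling should loop
    # over the returned list, e.g.
    #
    #     for poly in m.tissot(-175, 30, 15, 100, color='r', alpha=.6):
    #         poly.set_edgecolor('k')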
if __name__ == '__main__':
# Note that Hammer & Mollweide projections enforce a 2:1 aspect ratio.
# Use figure size good for a 2:1 plot.
fig = figure(figsize=(12,6))
# Set up the projection and draw a grid.
map = AllSkyMap(projection='hammer')
# Save the bounding limb to use as a clip path later.
limb = map.drawmapboundary(fill_color='white')
map.drawparallels(np.arange(-75,76,15), linewidth=0.5, dashes=[1,2],
labels=[1,0,0,0], fontsize=9)
map.drawmeridians(np.arange(-150,151,30), linewidth=0.5, dashes=[1,2])
# Label a subset of meridians.
lons = np.arange(-150,151,30)
map.label_meridians(lons, fontsize=9, vnudge=1,
halign='left', hnudge=-1) # hnudge<0 shifts to right
# x, y limits are [0, 4*rt2], [0, 2*rt2].
rt2 = sqrt(2)
# Draw a slanted green line crossing the map limb.
line = plot([rt2,0], [rt2,2*rt2], 'g-')
# Draw a slanted magenta line crossing the map limb but clipped.
line = plot([rt2+.1,0+.1], [rt2,2*rt2], 'm-', clip_path=limb)
# Draw some geodesics.
# First a transparent thick blue geodesic crossing the limb but not clipped,
# overlayed by a thinner red geodesic that is clipped (by default), to
# illustrate the effect of clipping.
lines = map.geodesic(120, 30, 240, 60, clip=False, c='b', lw=7, alpha=.5)
lines = map.geodesic(240, 60, 120, 30, c='r', lw=3, alpha=.5)
# Next two large limb-crossing geodesics with the same path, but rendered
# in opposite directions, one transparent blue, the other transparent
# yellow. They should be right on top of each other, giving a greenish
# brown hue.
lines = map.geodesic(240, -60, 120, 30, c='b', lw=2, alpha=.5)
lines = map.geodesic(120, 30, 240, -60, c='y', lw=2, alpha=.5)
# What happens if a geodesic is given coordinates spanning more than
# a single rotation? Not sure what to expect, but it shoots off the
# map (clipped here). Perhaps we should ensure lons are in [0, 360].
#lines = map.geodesic(120, 20, 240+360, 50, del_s=.2, c='g')
# Two tissots fully within the limb.
poly = map.tissot(60, -15, 10, 100)
poly = map.tissot(280, 60, 10, 100)
#poly = map.tissot(90, -85, 10, 100)
# Limb-spanning tissots in each quadrant.
# lower left:
poly = map.tissot(170, -60, 15, 100)
# upper left:
poly = map.tissot(175, 70, 15, 100)
# upper right (note negative longitude):
poly = map.tissot(-175, 30, 15, 100, color='r', alpha=.6)
# lower right:
poly = map.tissot(185, -40, 10, 100)
# Plot the tissot centers as "+" symbols. Note the top left symbol
# would cross the limb without the clip_path argument; this might be
# desired to enhance visibility.
lons = [170, 175, -175, 185]
lats = [-60, 70, 30, -40]
x, y = map(lons, lats)
map.scatter(x, y, s=40, marker='+', linewidths=1, edgecolors='g',
facecolors='none', clip_path=limb, zorder=10) # hi zorder -> top
title('AllSkyMap demo: Clipped lines, markers, geodesics, tissots')
show()<|fim▁end|> | |
<|file_name|>shootout-k-nucleotide-pipes.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-android: FIXME(#10393)
// ignore-pretty very bad with line comments
// multi tasking k-nucleotide
#![feature(box_syntax)]
use std::ascii::{AsciiExt, OwnedAsciiExt};
use std::cmp::Ordering::{self, Less, Greater, Equal};
use std::collections::HashMap;
use std::mem::replace;
use std::num::Float;
use std::option;
use std::os;
use std::sync::mpsc::{channel, Sender, Receiver};
use std::thread::Thread;
fn f64_cmp(x: f64, y: f64) -> Ordering {
// arbitrarily decide that NaNs are larger than everything.
if y.is_nan() {
Less
} else if x.is_nan() {
Greater
} else if x < y {
Less
} else if x == y {
Equal
} else {
Greater
}
}
// given a map, print a sorted version of it
fn sort_and_fmt(mm: &HashMap<Vec<u8> , uint>, total: uint) -> String {
fn pct(xx: uint, yy: uint) -> f64 {
return (xx as f64) * 100.0 / (yy as f64);
}
// sort by key, then by value
fn sortKV(mut orig: Vec<(Vec<u8> ,f64)> ) -> Vec<(Vec<u8> ,f64)> {
orig.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b));
orig.sort_by(|&(_, a), &(_, b)| f64_cmp(b, a));
orig
}
let mut pairs = Vec::new();
// map -> [(k,%)]
for (key, &val) in mm.iter() {
pairs.push(((*key).clone(), pct(val, total)));
}
let pairs_sorted = sortKV(pairs);
let mut buffer = String::new();
for &(ref k, v) in pairs_sorted.iter() {
buffer.push_str(format!("{:?} {:0.3}\n",
k.to_ascii_uppercase(),
v).as_slice());
}
return buffer
}
// given a map, search for the frequency of a pattern
fn find(mm: &HashMap<Vec<u8> , uint>, key: String) -> uint {
let key = key.into_ascii_lowercase();
match mm.get(key.as_bytes()) {
option::Option::None => { return 0u; }
option::Option::Some(&num) => { return num; }
}
}
// given a map, increment the counter for a key
fn update_freq(mm: &mut HashMap<Vec<u8> , uint>, key: &[u8]) {
let key = key.to_vec();
let newval = match mm.remove(&key) {
Some(v) => v + 1,
None => 1
};
mm.insert(key, newval);
}
// given a Vec<u8>, for each window call a function
// i.e., for "hello" and windows of size four,
// run it("hell") and it("ello"), then return "llo"
fn windows_with_carry<F>(bb: &[u8], nn: uint, mut it: F) -> Vec<u8> where
F: FnMut(&[u8]),
{
let mut ii = 0u;
let len = bb.len();
while ii < len - (nn - 1u) {
it(&bb[ii..(ii+nn)]);
ii += 1u;
}
return bb[(len - (nn - 1u))..len].to_vec();
}
fn make_sequence_processor(sz: uint,
from_parent: &Receiver<Vec<u8>>,
to_parent: &Sender<String>) {
let mut freqs: HashMap<Vec<u8>, uint> = HashMap::new();
let mut carry = Vec::new();
let mut total: uint = 0u;
let mut line: Vec<u8>;
loop {
line = from_parent.recv().unwrap();
if line == Vec::new() { break; }
carry.push_all(line.as_slice());
carry = windows_with_carry(carry.as_slice(), sz, |window| {
update_freq(&mut freqs, window);
total += 1u;
});
}
let buffer = match sz {
1u => { sort_and_fmt(&freqs, total) }
2u => { sort_and_fmt(&freqs, total) }
3u => { format!("{}\t{}", find(&freqs, "GGT".to_string()), "GGT") }
4u => { format!("{}\t{}", find(&freqs, "GGTA".to_string()), "GGTA") }
6u => { format!("{}\t{}", find(&freqs, "GGTATT".to_string()), "GGTATT") }
12u => { format!("{}\t{}", find(&freqs, "GGTATTTTAATT".to_string()), "GGTATTTTAATT") }
18u => { format!("{}\t{}", find(&freqs, "GGTATTTTAATTTATAGT".to_string()),
"GGTATTTTAATTTATAGT") }
_ => { "".to_string() }
};
to_parent.send(buffer).unwrap();
}
// given a FASTA file on stdin, process sequence THREE
fn main() {
use std::io::{stdio, MemReader, BufferedReader};
let rdr = if os::getenv("RUST_BENCH").is_some() {
let foo = include_bytes!("shootout-k-nucleotide.data");
box MemReader::new(foo.to_vec()) as Box<Reader>
} else {
box stdio::stdin() as Box<Reader><|fim▁hole|> };
let mut rdr = BufferedReader::new(rdr);
// initialize each sequence sorter
let sizes = vec!(1u,2,3,4,6,12,18);
let mut streams = range(0, sizes.len()).map(|_| {
Some(channel::<String>())
}).collect::<Vec<_>>();
let mut from_child = Vec::new();
let to_child = sizes.iter().zip(streams.iter_mut()).map(|(sz, stream_ref)| {
let sz = *sz;
let stream = replace(stream_ref, None);
let (to_parent_, from_child_) = stream.unwrap();
from_child.push(from_child_);
let (to_child, from_parent) = channel();
Thread::spawn(move|| {
make_sequence_processor(sz, &from_parent, &to_parent_);
});
to_child
}).collect::<Vec<Sender<Vec<u8> >> >();
// latch stores true after we've started
// reading the sequence of interest
let mut proc_mode = false;
for line in rdr.lines() {
let line = line.unwrap().as_slice().trim().to_string();
if line.len() == 0u { continue; }
match (line.as_bytes()[0] as char, proc_mode) {
// start processing if this is the one
('>', false) => {
match line.as_slice().slice_from(1).find_str("THREE") {
Some(_) => { proc_mode = true; }
None => { }
}
}
// break our processing
('>', true) => { break; }
// process the sequence for k-mers
(_, true) => {
let line_bytes = line.as_bytes();
for (ii, _sz) in sizes.iter().enumerate() {
let lb = line_bytes.to_vec();
to_child[ii].send(lb).unwrap();
}
}
// whatever
_ => { }
}
}
// finish...
for (ii, _sz) in sizes.iter().enumerate() {
to_child[ii].send(Vec::new()).unwrap();
}
// now fetch and print result messages
for (ii, _sz) in sizes.iter().enumerate() {
println!("{:?}", from_child[ii].recv().unwrap());
}
}<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2013 Michal Čihař <[email protected]>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.shortcuts import render_to_response, get_object_or_404
from django.views.decorators.cache import cache_page
from weblate.trans import appsettings
from django.core.servers.basehttp import FileWrapper
from django.utils.translation import ugettext as _
import django.utils.translation
from django.template import RequestContext, loader
from django.http import (
HttpResponse, HttpResponseRedirect, HttpResponseNotFound, Http404
)
from django.contrib import messages
from django.contrib.auth.decorators import (
login_required, permission_required, user_passes_test
)
from django.contrib.auth.models import AnonymousUser
from django.db.models import Q, Count, Sum
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.utils.safestring import mark_safe
from weblate.trans.models import (
Project, SubProject, Translation, Unit, Suggestion, Check,
Dictionary, Change, Comment, get_versions
)
from weblate.lang.models import Language
from weblate.trans.checks import CHECKS
from weblate.trans.forms import (
TranslationForm, UploadForm, SimpleUploadForm, ExtraUploadForm, SearchForm,
MergeForm, AutoForm, WordForm, DictUploadForm, ReviewForm, LetterForm,
AntispamForm, CommentForm
)
from weblate.trans.util import join_plural
from weblate.accounts.models import Profile, send_notification_email
import weblate
from whoosh.analysis import StandardAnalyzer, StemmingAnalyzer
import datetime
import logging
import os.path
import json
import csv
from xml.etree import ElementTree
import urllib2
# See https://code.djangoproject.com/ticket/6027
class FixedFileWrapper(FileWrapper):
def __iter__(self):
self.filelike.seek(0)
return self
logger = logging.getLogger('weblate')
def home(request):
'''
Home page of Weblate showing list of projects, stats
and user links if logged in.
'''
projects = Project.objects.all_acl(request.user)
acl_projects = projects
if projects.count() == 1:
projects = SubProject.objects.filter(project=projects[0])
# Warn about an unfilled full name (usually caused by migration of
# users from an older system)
if not request.user.is_anonymous() and request.user.get_full_name() == '':
messages.warning(
request,
_('Please set your full name in your profile.')
)
# Load user translations if user is authenticated
usertranslations = None
if request.user.is_authenticated():
profile = request.user.get_profile()
usertranslations = Translation.objects.filter(
language__in=profile.languages.all()
).order_by(
'subproject__project__name', 'subproject__name'
)
# Some stats
top_translations = Profile.objects.order_by('-translated')[:10]
top_suggestions = Profile.objects.order_by('-suggested')[:10]
last_changes = Change.objects.filter(
translation__subproject__project__in=acl_projects,
).order_by('-timestamp')[:10]
return render_to_response('index.html', RequestContext(request, {
'projects': projects,
'top_translations': top_translations,
'top_suggestions': top_suggestions,
'last_changes': last_changes,
'last_changes_rss': reverse('rss'),
'usertranslations': usertranslations,
}))
def show_checks(request):
'''
List of failing checks.
'''
allchecks = Check.objects.filter(
ignore=False
).values('check').annotate(count=Count('id'))
return render_to_response('checks.html', RequestContext(request, {
'checks': allchecks,
'title': _('Failing checks'),
}))
def show_check(request, name):
'''
Details about failing check.
'''
try:
check = CHECKS[name]
except KeyError:
raise Http404('No check matches the given query.')
checks = Check.objects.filter(
check=name, ignore=False
).values('project__slug').annotate(count=Count('id'))
return render_to_response('check.html', RequestContext(request, {
'checks': checks,
'title': check.name,
'check': check,
}))
def show_check_project(request, name, project):
'''
Show checks failing in a project.
'''
prj = get_object_or_404(Project, slug=project)
prj.check_acl(request)
try:
check = CHECKS[name]
except KeyError:
raise Http404('No check matches the given query.')
units = Unit.objects.none()
if check.target:
langs = Check.objects.filter(
check=name, project=prj, ignore=False
).values_list('language', flat=True).distinct()
for lang in langs:
checks = Check.objects.filter(
check=name, project=prj, language=lang, ignore=False
).values_list('checksum', flat=True)
res = Unit.objects.filter(
checksum__in=checks,
translation__language=lang,
translation__subproject__project=prj,
translated=True
).values(
'translation__subproject__slug',
'translation__subproject__project__slug'
).annotate(count=Count('id'))
units |= res
if check.source:
checks = Check.objects.filter(
check=name,
project=prj,
language=None,
ignore=False
).values_list(
'checksum', flat=True
)
for subproject in prj.subproject_set.all():
lang = subproject.translation_set.all()[0].language
res = Unit.objects.filter(
checksum__in=checks,
translation__language=lang,
translation__subproject=subproject
).values(
'translation__subproject__slug',
'translation__subproject__project__slug'
).annotate(count=Count('id'))
units |= res
return render_to_response('check_project.html', RequestContext(request, {
'checks': units,
'title': '%s/%s' % (prj.__unicode__(), check.name),
'check': check,
'project': prj,
}))
def show_check_subproject(request, name, project, subproject):
'''
Show checks failing in a subproject.
'''
subprj = get_object_or_404(
SubProject,
slug=subproject,
project__slug=project
)
subprj.check_acl(request)
try:<|fim▁hole|> raise Http404('No check matches the given query.')
units = Unit.objects.none()
if check.target:
langs = Check.objects.filter(
check=name,
project=subprj.project,
ignore=False
).values_list(
'language', flat=True
).distinct()
for lang in langs:
checks = Check.objects.filter(
check=name,
project=subprj.project,
language=lang,
ignore=False
).values_list('checksum', flat=True)
res = Unit.objects.filter(
translation__subproject=subprj,
checksum__in=checks,
translation__language=lang,
translated=True
).values(
'translation__language__code'
).annotate(count=Count('id'))
units |= res
source_checks = []
if check.source:
checks = Check.objects.filter(
check=name, project=subprj.project,
language=None,
ignore=False
).values_list('checksum', flat=True)
lang = subprj.translation_set.all()[0].language
res = Unit.objects.filter(
translation__subproject=subprj,
checksum__in=checks,
translation__language=lang
).count()
if res > 0:
source_checks.append(res)
return render_to_response(
'check_subproject.html',
RequestContext(request, {
'checks': units,
'source_checks': source_checks,
'anychecks': len(units) + len(source_checks) > 0,
'title': '%s/%s' % (subprj.__unicode__(), check.name),
'check': check,
'subproject': subprj,
})
)
def show_languages(request):
return render_to_response('languages.html', RequestContext(request, {
'languages': Language.objects.have_translation(),
'title': _('Languages'),
}))
def show_language(request, lang):
obj = get_object_or_404(Language, code=lang)
last_changes = Change.objects.filter(
translation__language=obj
).order_by('-timestamp')[:10]
dicts = Dictionary.objects.filter(
language=obj
).values_list('project', flat=True).distinct()
return render_to_response('language.html', RequestContext(request, {
'object': obj,
'last_changes': last_changes,
'last_changes_rss': reverse('rss-language', kwargs={'lang': obj.code}),
'dicts': Project.objects.filter(id__in=dicts),
}))
def show_dictionaries(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
dicts = Translation.objects.filter(
subproject__project=obj
).values_list('language', flat=True).distinct()
return render_to_response('dictionaries.html', RequestContext(request, {
'title': _('Dictionaries'),
'dicts': Language.objects.filter(id__in=dicts),
'project': obj,
}))
@login_required
@permission_required('trans.change_dictionary')
def edit_dictionary(request, project, lang):
prj = get_object_or_404(Project, slug=project)
prj.check_acl(request)
lang = get_object_or_404(Language, code=lang)
word = get_object_or_404(
Dictionary,
project=prj,
language=lang,
id=request.GET.get('id')
)
if request.method == 'POST':
form = WordForm(request.POST)
if form.is_valid():
word.source = form.cleaned_data['source']
word.target = form.cleaned_data['target']
word.save()
return HttpResponseRedirect(reverse(
'weblate.trans.views.show_dictionary',
kwargs={'project': prj.slug, 'lang': lang.code}
))
else:
form = WordForm(
initial={'source': word.source, 'target': word.target}
)
return render_to_response('edit_dictionary.html', RequestContext(request, {
'title': _('%(language)s dictionary for %(project)s') %
{'language': lang, 'project': prj},
'project': prj,
'language': lang,
'form': form,
}))
@login_required
@permission_required('trans.delete_dictionary')
def delete_dictionary(request, project, lang):
prj = get_object_or_404(Project, slug=project)
prj.check_acl(request)
lang = get_object_or_404(Language, code=lang)
word = get_object_or_404(
Dictionary,
project=prj,
language=lang,
id=request.POST.get('id')
)
word.delete()
return HttpResponseRedirect(reverse(
'weblate.trans.views.show_dictionary',
kwargs={'project': prj.slug, 'lang': lang.code})
)
@login_required
@permission_required('trans.upload_dictionary')
def upload_dictionary(request, project, lang):
prj = get_object_or_404(Project, slug=project)
prj.check_acl(request)
lang = get_object_or_404(Language, code=lang)
if request.method == 'POST':
form = DictUploadForm(request.POST, request.FILES)
if form.is_valid():
try:
count = Dictionary.objects.upload(
prj,
lang,
request.FILES['file'],
form.cleaned_data['overwrite']
)
if count == 0:
messages.warning(
request,
_('No words to import found in file.')
)
else:
messages.info(
request,
_('Imported %d words from file.') % count
)
except Exception as e:
messages.error(
request,
_('File content merge failed: %s' % unicode(e))
)
else:
messages.error(request, _('Failed to process form!'))
else:
messages.error(request, _('Failed to process form!'))
return HttpResponseRedirect(reverse(
'weblate.trans.views.show_dictionary',
kwargs={'project': prj.slug, 'lang': lang.code}
))
def download_dictionary(request, project, lang):
'''
Exports dictionary.
'''
prj = get_object_or_404(Project, slug=project)
prj.check_acl(request)
lang = get_object_or_404(Language, code=lang)
# Parse parameters
export_format = None
if 'format' in request.GET:
export_format = request.GET['format']
if export_format not in ['csv', 'po']:
export_format = 'csv'
# Grab all words
words = Dictionary.objects.filter(
project=prj,
language=lang
).order_by('source')
if export_format == 'csv':
response = HttpResponse(mimetype='text/csv; charset=utf-8')
filename = 'dictionary-%s-%s.csv' % (prj.slug, lang.code)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
writer = csv.writer(response)
for word in words.iterator():
writer.writerow((
word.source.encode('utf8'), word.target.encode('utf8')
))
return response
elif export_format == 'po':
from translate.storage.po import pounit, pofile
response = HttpResponse(mimetype='text/x-po; charset=utf-8')
filename = 'dictionary-%s-%s.po' % (prj.slug, lang.code)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
store = pofile()
site = Site.objects.get_current()
store.updateheader(
add=True,
language=lang.code,
x_generator='Weblate %s' % weblate.VERSION,
project_id_version='%s dictionary for %s' % (lang.name, prj.name),
language_team='%s <http://%s%s>' % (
lang.name,
site.domain,
reverse(
'weblate.trans.views.show_dictionary',
kwargs={'project': prj.slug, 'lang': lang.code}
),
)
)
for word in words.iterator():
unit = pounit(word.source)
unit.target = word.target
store.addunit(unit)
store.savefile(response)
return response
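# Illustrative note (assumed sample, not from the original code): the CSV
# branch above emits a plain two-column, header-less file with one
# "source,target" row per Dictionary entry, UTF-8 encoded, e.g.
#     hello,ahoj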
def show_dictionary(request, project, lang):
prj = get_object_or_404(Project, slug=project)
prj.check_acl(request)
lang = get_object_or_404(Language, code=lang)
if (request.method == 'POST'
and request.user.has_perm('trans.add_dictionary')):
form = WordForm(request.POST)
if form.is_valid():
Dictionary.objects.create(
project=prj,
language=lang,
source=form.cleaned_data['source'],
target=form.cleaned_data['target']
)
return HttpResponseRedirect(request.get_full_path())
else:
form = WordForm()
uploadform = DictUploadForm()
words = Dictionary.objects.filter(
project=prj, language=lang
).order_by('source')
limit = request.GET.get('limit', 25)
page = request.GET.get('page', 1)
letterform = LetterForm(request.GET)
if letterform.is_valid() and letterform.cleaned_data['letter'] != '':
words = words.filter(
source__istartswith=letterform.cleaned_data['letter']
)
letter = letterform.cleaned_data['letter']
else:
letter = ''
paginator = Paginator(words, limit)
try:
words = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
words = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
words = paginator.page(paginator.num_pages)
return render_to_response('dictionary.html', RequestContext(request, {
'title': _('%(language)s dictionary for %(project)s') %
{'language': lang, 'project': prj},
'project': prj,
'language': lang,
'words': words,
'form': form,
'uploadform': uploadform,
'letterform': letterform,
'letter': letter,
}))
def show_engage(request, project, lang=None):
# Get project object
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
# Handle language parameter
language = None
if lang is not None:
try:
django.utils.translation.activate(lang)
except:
# Ignore failure on activating language
pass
try:
language = Language.objects.get(code=lang)
except Language.DoesNotExist:
pass
context = {
'object': obj,
'project': obj.name,
'languages': obj.get_language_count(),
'total': obj.get_total(),
'percent': obj.get_translated_percent(language),
'url': obj.get_absolute_url(),
'language': language,
}
# Render text
if language is None:
status_text = _(
'<a href="%(url)s">Translation project for %(project)s</a> '
'currently contains %(total)s strings for translation and is '
'<a href="%(url)s">being translated into %(languages)s languages'
'</a>. Overall, these translations are %(percent)s%% complete.'
)
else:
# Translators: line of text in engagement widget, please use your
# language name instead of English
status_text = _(
'<a href="%(url)s">Translation project for %(project)s</a> into '
'English currently contains %(total)s strings for translation and '
'is %(percent)s%% complete.'
)
if 'English' in status_text:
status_text = status_text.replace('English', language.name)
context['status_text'] = mark_safe(status_text % context)
return render_to_response('engage.html', RequestContext(request, context))
def show_project(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
dicts = Dictionary.objects.filter(
project=obj
).values_list(
'language', flat=True
).distinct()
last_changes = Change.objects.filter(
translation__subproject__project=obj
).order_by('-timestamp')[:10]
return render_to_response('project.html', RequestContext(request, {
'object': obj,
'dicts': Language.objects.filter(id__in=dicts),
'last_changes': last_changes,
'last_changes_rss': reverse(
'rss-project',
kwargs={'project': obj.slug}
),
}))
def show_subproject(request, project, subproject):
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
last_changes = Change.objects.filter(
translation__subproject=obj
).order_by('-timestamp')[:10]
return render_to_response('subproject.html', RequestContext(request, {
'object': obj,
'last_changes': last_changes,
'last_changes_rss': reverse(
'rss-subproject',
kwargs={'subproject': obj.slug, 'project': obj.project.slug}
),
}))
@login_required
@permission_required('trans.automatic_translation')
def auto_translation(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
obj.commit_pending()
autoform = AutoForm(obj, request.POST)
change = None
if not obj.subproject.locked and autoform.is_valid():
if autoform.cleaned_data['inconsistent']:
units = obj.unit_set.filter_type('inconsistent', obj)
elif autoform.cleaned_data['overwrite']:
units = obj.unit_set.all()
else:
units = obj.unit_set.filter(translated=False)
sources = Unit.objects.filter(
translation__language=obj.language,
translated=True
)
if autoform.cleaned_data['subproject'] == '':
sources = sources.filter(
translation__subproject__project=obj.subproject.project
).exclude(
translation=obj
)
else:
subprj = SubProject.objects.get(
project=obj.subproject.project,
slug=autoform.cleaned_data['subproject']
)
sources = sources.filter(translation__subproject=subprj)
for unit in units.iterator():
update = sources.filter(checksum=unit.checksum)
if update.exists():
# Get first entry
update = update[0]
# No save if translation is same
if unit.fuzzy == update.fuzzy and unit.target == update.target:
continue
# Copy translation
unit.fuzzy = update.fuzzy
unit.target = update.target
# Create a single change object for the whole merge
if change is None:
change = Change.objects.create(
unit=unit,
translation=unit.translation,
user=request.user
)
# Save unit to backend
unit.save_backend(request, False, False)
messages.info(request, _('Automatic translation completed.'))
else:
messages.error(request, _('Failed to process form!'))
return HttpResponseRedirect(obj.get_absolute_url())
def review_source(request, project, subproject):
'''
Listing of source strings to review.
'''
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
if not obj.translation_set.exists():
raise Http404('No translation exists in this subproject.')
# Grab first translation in subproject
# (this assumes all have same source strings)
source = obj.translation_set.all()[0]
# Grab search type and page number
rqtype = request.GET.get('type', 'all')
limit = request.GET.get('limit', 50)
page = request.GET.get('page', 1)
# Filter units
sources = source.unit_set.filter_type(rqtype, source)
paginator = Paginator(sources, limit)
try:
sources = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
sources = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
sources = paginator.page(paginator.num_pages)
return render_to_response('source-review.html', RequestContext(request, {
'object': obj,
'source': source,
'sources': sources,
'title': _('Review source strings in %s') % obj.__unicode__(),
}))
def show_source(request, project, subproject):
'''
Show source strings summary and checks.
'''
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
if not obj.translation_set.exists():
raise Http404('No translation exists in this subproject.')
# Grab first translation in subproject
# (this assumes all have same source strings)
source = obj.translation_set.all()[0]
return render_to_response('source.html', RequestContext(request, {
'object': obj,
'source': source,
'title': _('Source strings in %s') % obj.__unicode__(),
}))
def show_translation(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
last_changes = Change.objects.filter(
translation=obj
).order_by('-timestamp')[:10]
# Check locks
obj.is_locked(request)
# How much is user allowed to configure upload?
if request.user.has_perm('trans.author_translation'):
form = ExtraUploadForm()
elif request.user.has_perm('trans.overwrite_translation'):
form = UploadForm()
else:
form = SimpleUploadForm()
# Is user allowed to do automatic translation?
if request.user.has_perm('trans.automatic_translation'):
autoform = AutoForm(obj)
else:
autoform = None
# Search form for everybody
search_form = SearchForm()
# Review form for logged in users
if request.user.is_anonymous():
review_form = None
else:
review_form = ReviewForm(
initial={
'date': datetime.date.today() - datetime.timedelta(days=31)
}
)
return render_to_response('translation.html', RequestContext(request, {
'object': obj,
'form': form,
'autoform': autoform,
'search_form': search_form,
'review_form': review_form,
'last_changes': last_changes,
'last_changes_rss': reverse(
'rss-translation',
kwargs={
'lang': obj.language.code,
'subproject': obj.subproject.slug,
'project': obj.subproject.project.slug
}
),
}))
@login_required
@permission_required('trans.commit_translation')
def commit_project(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
obj.commit_pending()
messages.info(request, _('All pending translations were committed.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.commit_translation')
def commit_subproject(request, project, subproject):
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
obj.commit_pending()
messages.info(request, _('All pending translations were committed.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.commit_translation')
def commit_translation(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
obj.commit_pending()
messages.info(request, _('All pending translations were committed.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.update_translation')
def update_project(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
if obj.do_update(request):
messages.info(request, _('All repositories were updated.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.update_translation')
def update_subproject(request, project, subproject):
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
if obj.do_update(request):
messages.info(request, _('All repositories were updated.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.update_translation')
def update_translation(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
if obj.do_update(request):
messages.info(request, _('All repositories were updated.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.push_translation')
def push_project(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
if obj.do_push(request):
messages.info(request, _('All repositories were pushed.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.push_translation')
def push_subproject(request, project, subproject):
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
if obj.do_push(request):
messages.info(request, _('All repositories were pushed.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.push_translation')
def push_translation(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
if obj.do_push(request):
messages.info(request, _('All repositories were pushed.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.reset_translation')
def reset_project(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
if obj.do_reset(request):
messages.info(request, _('All repositories have been reset.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.reset_translation')
def reset_subproject(request, project, subproject):
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
if obj.do_reset(request):
messages.info(request, _('All repositories have been reset.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.reset_translation')
def reset_translation(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
if obj.do_reset(request):
messages.info(request, _('All repositories have been reset.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.lock_translation')
def lock_translation(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
if not obj.is_user_locked(request):
obj.create_lock(request.user, True)
messages.info(request, _('Translation is now locked for you.'))
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
def update_lock(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
if not obj.is_user_locked(request):
obj.update_lock_time()
return HttpResponse('ok')
@login_required
@permission_required('trans.lock_translation')
def unlock_translation(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
if not obj.is_user_locked(request):
obj.create_lock(None)
messages.info(
request,
_('Translation is now open for translation updates.')
)
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.lock_subproject')
def lock_subproject(request, project, subproject):
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
obj.commit_pending()
obj.locked = True
obj.save()
messages.info(
request,
_('Subproject is now locked for translation updates!')
)
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.lock_subproject')
def unlock_subproject(request, project, subproject):
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
obj.locked = False
obj.save()
messages.info(
request,
_('Subproject is now open for translation updates.')
)
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.lock_subproject')
def lock_project(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
obj.commit_pending()
for subproject in obj.subproject_set.all():
subproject.locked = True
subproject.save()
messages.info(
request,
_('All subprojects are now locked for translation updates!')
)
return HttpResponseRedirect(obj.get_absolute_url())
@login_required
@permission_required('trans.lock_subproject')
def unlock_project(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
for subproject in obj.subproject_set.all():
subproject.locked = False
subproject.save()
messages.info(request, _('Project is now open for translation updates.'))
return HttpResponseRedirect(obj.get_absolute_url())
def download_translation(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
# Retrieve ttkit store to get extension and mime type
store = obj.get_store()
srcfilename = obj.get_filename()
if store.Mimetypes is None:
# Properties files do not expose mimetype
mime = 'text/plain'
else:
mime = store.Mimetypes[0]
if store.Extensions is None:
# Typo in translate-toolkit 1.9, see
# https://github.com/translate/translate/pull/10
if hasattr(store, 'Exensions'):
ext = store.Exensions[0]
else:
ext = 'txt'
else:
ext = store.Extensions[0]
# Construct file name (do not use real filename as it is usually not
# that useful)
filename = '%s-%s-%s.%s' % (project, subproject, lang, ext)
# Django wrapper for sending file
wrapper = FixedFileWrapper(file(srcfilename))
response = HttpResponse(wrapper, mimetype=mime)
# Fill in response headers
response['Content-Disposition'] = 'attachment; filename=%s' % filename
response['Content-Length'] = os.path.getsize(srcfilename)
return response
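# Illustrative note (assumed values): for project "hello", subproject "master"
# and language code "cs" with a gettext store, the filename constructed above
# is "hello-master-cs.po".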
def bool2str(val):
if val:
return 'on'
return ''
def parse_search_url(request):
# Check where we are
rqtype = request.REQUEST.get('type', 'all')
direction = request.REQUEST.get('dir', 'forward')
pos = request.REQUEST.get('pos', '-1')
try:
pos = int(pos)
except:
pos = -1
# Pre-process search form
if request.method == 'POST':
search_form = SearchForm(request.POST)
else:
search_form = SearchForm(request.GET)
if search_form.is_valid():
search_query = search_form.cleaned_data['q']
search_type = search_form.cleaned_data['search']
if search_type == '':
search_type = 'ftx'
search_source = search_form.cleaned_data['src']
search_target = search_form.cleaned_data['tgt']
search_context = search_form.cleaned_data['ctx']
# Sane defaults
if not search_context and not search_source and not search_target:
search_source = True
search_target = True
search_url = '&q=%s&src=%s&tgt=%s&ctx=%s&search=%s' % (
search_query,
bool2str(search_source),
bool2str(search_target),
bool2str(search_context),
search_type,
)
else:
search_query = ''
search_type = 'ftx'
search_source = True
search_target = True
search_context = False
search_url = ''
if 'date' in request.REQUEST:
search_url += '&date=%s' % request.REQUEST['date']
return (
rqtype,
direction,
pos,
search_query,
search_type,
search_source,
search_target,
search_context,
search_url
)
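# Illustrative note (assumed example): for a full-text query "hello" searched
# in both source and target, parse_search_url() produces a search_url fragment
# like "&q=hello&src=on&tgt=on&ctx=&search=ftx", which the views below append
# to redirect URLs so the search state survives navigation.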
def get_filter_name(rqtype, search_query):
'''
Returns name of current filter.
'''
if search_query != '':
return _('Search for "%s"') % search_query
if rqtype == 'all':
return None
elif rqtype == 'fuzzy':
return _('Fuzzy strings')
elif rqtype == 'untranslated':
return _('Untranslated strings')
elif rqtype == 'suggestions':
return _('Strings with suggestions')
elif rqtype == 'allchecks':
return _('Strings with any failing checks')
elif rqtype in CHECKS:
return CHECKS[rqtype].name
else:
return None
def translate(request, project, subproject, lang):
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
# Check locks
project_locked, user_locked, own_lock = obj.is_locked(request, True)
locked = project_locked or user_locked
if request.user.is_authenticated():
profile = request.user.get_profile()
antispam = None
else:
profile = None
antispam = AntispamForm()
secondary = None
unit = None
rqtype, direction, pos, search_query, search_type, search_source, search_target, search_context, search_url = parse_search_url(request)
# Any form submitted?
if request.method == 'POST':
# Antispam protection
if not request.user.is_authenticated():
antispam = AntispamForm(request.POST)
if not antispam.is_valid():
# Silently redirect to next entry
return HttpResponseRedirect('%s?type=%s&pos=%d%s' % (
obj.get_translate_url(),
rqtype,
pos,
search_url
))
form = TranslationForm(request.POST)
if form.is_valid() and not project_locked:
# Check whether translation is not outdated
obj.check_sync()
try:
try:
unit = Unit.objects.get(
checksum=form.cleaned_data['checksum'],
translation=obj
)
except Unit.MultipleObjectsReturned:
# Possible temporary inconsistency caused by ongoing update
# of repo, let's pretend everything is okay
unit = Unit.objects.filter(
checksum=form.cleaned_data['checksum'],
translation=obj
)[0]
if 'suggest' in request.POST:
# Handle suggestion saving
user = request.user
if isinstance(user, AnonymousUser):
user = None
if form.cleaned_data['target'] == len(form.cleaned_data['target']) * ['']:
messages.error(request, _('Your suggestion is empty!'))
# Stay on same entry
return HttpResponseRedirect(
'%s?type=%s&pos=%d&dir=stay%s' % (
obj.get_translate_url(),
rqtype,
pos,
search_url
)
)
# Create the suggestion
sug = Suggestion.objects.create(
target=join_plural(form.cleaned_data['target']),
checksum=unit.checksum,
language=unit.translation.language,
project=unit.translation.subproject.project,
user=user)
# Record in change
Change.objects.create(
unit=unit,
action=Change.ACTION_SUGGESTION,
translation=unit.translation,
user=user
)
# Invalidate counts cache
unit.translation.invalidate_cache('suggestions')
# Invite user to become translator if there is nobody else
recent_changes = Change.objects.content().filter(
translation=unit.translation,
).exclude(
user=None
).order_by('-timestamp')
if recent_changes.count() == 0 or True:
messages.info(
request,
_('There is currently no active translator for this translation, please consider becoming a translator as your suggestion might otherwise remain unreviewed.')
)
# Notify subscribed users
subscriptions = Profile.objects.subscribed_new_suggestion(
obj.subproject.project,
obj.language,
request.user
)
for subscription in subscriptions:
subscription.notify_new_suggestion(obj, sug, unit)
# Update suggestion stats
if profile is not None:
profile.suggested += 1
profile.save()
elif not request.user.is_authenticated():
# We accept translations only from authenticated
messages.error(
request,
_('You need to log in to be able to save translations!')
)
elif not request.user.has_perm('trans.save_translation'):
# Need privilege to save
messages.error(
request,
_('You don\'t have privileges to save translations!')
)
elif not user_locked:
# Remember old checks
oldchecks = set(
unit.active_checks().values_list('check', flat=True)
)
# Update unit and save it
unit.target = join_plural(form.cleaned_data['target'])
unit.fuzzy = form.cleaned_data['fuzzy']
saved = unit.save_backend(request)
if saved:
# Get new set of checks
newchecks = set(
unit.active_checks().values_list('check', flat=True)
)
# Did we introduce any new failures?
if newchecks > oldchecks:
# Show message to user
messages.error(
request,
_('Some checks have failed on your translation!')
)
# Stay on same entry
return HttpResponseRedirect(
'%s?type=%s&pos=%d&dir=stay%s' % (
obj.get_translate_url(),
rqtype,
pos,
search_url
)
)
# Redirect to next entry
return HttpResponseRedirect('%s?type=%s&pos=%d%s' % (
obj.get_translate_url(),
rqtype,
pos,
search_url
))
except Unit.DoesNotExist:
logger.error(
'message %s disappeared!',
form.cleaned_data['checksum']
)
messages.error(
request,
_('Message you wanted to translate is no longer available!')
)
# Handle translation merging
if 'merge' in request.GET and not locked:
if not request.user.has_perm('trans.save_translation'):
# Need privilege to save
messages.error(
request,
_('You don\'t have privileges to save translations!')
)
else:
try:
mergeform = MergeForm(request.GET)
if mergeform.is_valid():
try:
unit = Unit.objects.get(
checksum=mergeform.cleaned_data['checksum'],
translation=obj
)
except Unit.MultipleObjectsReturned:
# Possible temporary inconsistency caused by ongoing
# update of repo, let's pretend everything is okay
unit = Unit.objects.filter(
checksum=mergeform.cleaned_data['checksum'],
translation=obj
)[0]
merged = Unit.objects.get(
pk=mergeform.cleaned_data['merge']
)
if unit.checksum != merged.checksum:
messages.error(
request,
_('Can not merge different messages!')
)
else:
# Store unit
unit.target = merged.target
unit.fuzzy = merged.fuzzy
saved = unit.save_backend(request)
# Update stats if there was change
if saved:
profile.translated += 1
profile.save()
# Redirect to next entry
return HttpResponseRedirect('%s?type=%s&pos=%d%s' % (
obj.get_translate_url(),
rqtype,
pos,
search_url
))
except Unit.DoesNotExist:
logger.error(
'message %s disappeared!',
form.cleaned_data['checksum']
)
messages.error(
request,
_('Message you wanted to translate is no longer available!')
)
# Handle accepting/deleting suggestions
if not locked and ('accept' in request.GET or 'delete' in request.GET):
# Check for authenticated users
if not request.user.is_authenticated():
messages.error(request, _('You need to log in to be able to manage suggestions!'))
return HttpResponseRedirect('%s?type=%s&pos=%d&dir=stay%s' % (
obj.get_translate_url(),
rqtype,
pos,
search_url
))
# Parse suggestion ID
if 'accept' in request.GET:
if not request.user.has_perm('trans.accept_suggestion'):
messages.error(request, _('You do not have privilege to accept suggestions!'))
return HttpResponseRedirect('%s?type=%s&pos=%d&dir=stay%s' % (
obj.get_translate_url(),
rqtype,
pos,
search_url
))
sugid = request.GET['accept']
else:
if not request.user.has_perm('trans.delete_suggestion'):
messages.error(request, _('You do not have privilege to delete suggestions!'))
return HttpResponseRedirect('%s?type=%s&pos=%d&dir=stay%s' % (
obj.get_translate_url(),
rqtype,
pos,
search_url
))
sugid = request.GET['delete']
try:
sugid = int(sugid)
suggestion = Suggestion.objects.get(pk=sugid)
except:
suggestion = None
if suggestion is not None:
if 'accept' in request.GET:
# Accept the suggestion
suggestion.accept(request)
# Invalidate caches
for unit in Unit.objects.filter(checksum=suggestion.checksum):
unit.translation.invalidate_cache('suggestions')
# Delete suggestion in both cases (accepted ones are no longer
# needed)
suggestion.delete()
else:
messages.error(request, _('Invalid suggestion!'))
# Redirect to same entry for possible editing
return HttpResponseRedirect('%s?type=%s&pos=%d&dir=stay%s' % (
obj.get_translate_url(),
rqtype,
pos,
search_url
))
reviewform = ReviewForm(request.GET)
if reviewform.is_valid():
allunits = obj.unit_set.review(
reviewform.cleaned_data['date'],
request.user
)
# Review
if direction == 'stay':
units = allunits.filter(position=pos)
elif direction == 'back':
units = allunits.filter(position__lt=pos).order_by('-position')
else:
units = allunits.filter(position__gt=pos)
elif search_query != '':
# Apply search conditions
if search_type == 'exact':
query = Q()
if search_source:
query |= Q(source=search_query)
if search_target:
query |= Q(target=search_query)
if search_context:
query |= Q(context=search_query)
allunits = obj.unit_set.filter(query)
elif search_type == 'substring':
query = Q()
if search_source:
query |= Q(source__icontains=search_query)
if search_target:
query |= Q(target__icontains=search_query)
if search_context:
query |= Q(context__icontains=search_query)
allunits = obj.unit_set.filter(query)
else:
allunits = obj.unit_set.search(
search_query,
search_source,
search_context,
search_target
)
if direction == 'stay':
units = obj.unit_set.filter(position=pos)
elif direction == 'back':
units = allunits.filter(position__lt=pos).order_by('-position')
else:
units = allunits.filter(position__gt=pos)
elif 'checksum' in request.GET:
allunits = obj.unit_set.filter(checksum=request.GET['checksum'])
units = allunits
else:
allunits = obj.unit_set.filter_type(rqtype, obj)
# What unit set is about to show
if direction == 'stay':
units = obj.unit_set.filter(position=pos)
elif direction == 'back':
units = allunits.filter(position__lt=pos).order_by('-position')
else:
units = allunits.filter(position__gt=pos)
# If we failed to get unit above or on no POST
if unit is None:
# Grab actual unit
try:
unit = units[0]
except IndexError:
messages.info(request, _('You have reached end of translating.'))
return HttpResponseRedirect(obj.get_absolute_url())
# Show secondary languages for logged in users
if profile:
secondary_langs = profile.secondary_languages.exclude(
id=unit.translation.language.id
)
project = unit.translation.subproject.project
secondary = Unit.objects.filter(
checksum=unit.checksum,
translated=True,
translation__subproject__project=project,
translation__language__in=secondary_langs,
)
# distinct('target') requires Django 1.4, so emulate it here
# on the assumption that we will not get too many results
targets = {}
res = []
for lang in secondary:
if lang.target in targets:
continue
targets[lang.target] = 1
res.append(lang)
secondary = res
# Prepare form
form = TranslationForm(initial={
'checksum': unit.checksum,
'target': (unit.translation.language, unit.get_target_plurals()),
'fuzzy': unit.fuzzy,
})
total = obj.unit_set.all().count()
filter_count = allunits.count()
return render_to_response(
'translate.html',
RequestContext(request, {
'object': obj,
'unit': unit,
'last_changes': unit.change_set.all()[:10],
'total': total,
'type': rqtype,
'filter_name': get_filter_name(rqtype, search_query),
'filter_count': filter_count,
'filter_pos': filter_count + 1 - units.count(),
'form': form,
'antispam': antispam,
'comment_form': CommentForm(),
'target_language': obj.language.code.replace('_', '-').lower(),
'update_lock': own_lock,
'secondary': secondary,
'search_query': search_query,
'search_url': search_url,
'search_source': bool2str(search_source),
'search_type': search_type,
'search_target': bool2str(search_target),
'search_context': bool2str(search_context),
'locked': locked,
'user_locked': user_locked,
'project_locked': project_locked,
},
))
@login_required
def comment(request, pk):
'''
Adds new comment.
'''
obj = get_object_or_404(Unit, pk=pk)
obj.check_acl(request)
if request.POST.get('type', '') == 'source':
lang = None
else:
lang = obj.translation.language
form = CommentForm(request.POST)
if form.is_valid():
new_comment = Comment.objects.create(
user=request.user,
checksum=obj.checksum,
project=obj.translation.subproject.project,
comment=form.cleaned_data['comment'],
language=lang
)
Change.objects.create(
unit=obj,
action=Change.ACTION_COMMENT,
translation=obj.translation,
user=request.user
)
# Invalidate counts cache
if lang is None:
obj.translation.invalidate_cache('sourcecomments')
else:
obj.translation.invalidate_cache('targetcomments')
messages.info(request, _('Posted new comment'))
# Notify subscribed users
subscriptions = Profile.objects.subscribed_new_comment(
obj.translation.subproject.project,
lang,
request.user
)
for subscription in subscriptions:
subscription.notify_new_comment(obj, new_comment)
# Notify upstream
if lang is None and obj.translation.subproject.report_source_bugs != '':
send_notification_email(
'en',
obj.translation.subproject.report_source_bugs,
'new_comment',
obj.translation,
{
'unit': obj,
'comment': new_comment,
'subproject': obj.translation.subproject,
},
from_email=request.user.email,
)
else:
messages.error(request, _('Failed to add comment!'))
return HttpResponseRedirect(obj.get_absolute_url())
def get_string(request, checksum):
'''
AJAX handler for getting raw string.
'''
units = Unit.objects.filter(checksum=checksum)
if units.count() == 0:
return HttpResponse('')
units[0].check_acl(request)
return HttpResponse(units[0].get_source_plurals()[0])
def get_similar(request, unit_id):
'''
AJAX handler for getting similar strings.
'''
unit = get_object_or_404(Unit, pk=int(unit_id))
unit.check_acl(request)
similar_units = Unit.objects.similar(unit)
# distinct('target') requires Django 1.4, so emulate it here
# on the assumption that we will not get too many results
targets = {}
res = []
for similar in similar_units:
if similar.target in targets:
continue
targets[similar.target] = 1
res.append(similar)
similar = res
return render_to_response('js/similar.html', RequestContext(request, {
'similar': similar,
}))
def get_other(request, unit_id):
'''
AJAX handler for same strings in other subprojects.
'''
unit = get_object_or_404(Unit, pk=int(unit_id))
unit.check_acl(request)
other = Unit.objects.same(unit)
rqtype, direction, pos, search_query, search_type, search_source, search_target, search_context, search_url = parse_search_url(request)
return render_to_response('js/other.html', RequestContext(request, {
'other': other,
'unit': unit,
'type': rqtype,
'search_url': search_url,
}))
def get_dictionary(request, unit_id):
'''
Lists words from dictionary for current translation.
'''
unit = get_object_or_404(Unit, pk=int(unit_id))
unit.check_acl(request)
words = set()
# Prepare analyzers
# - standard analyzer simply splits words
# - stemming extracts stems, to catch things like plurals
analyzers = (StandardAnalyzer(), StemmingAnalyzer())
# Extract words from all plurals and from context
for text in unit.get_source_plurals() + [unit.context]:
for analyzer in analyzers:
words = words.union([token.text for token in analyzer(text)])
# Grab all words in the dictionary
dictionary = Dictionary.objects.filter(
project = unit.translation.subproject.project,
language = unit.translation.language
)
if len(words) == 0:
# No extracted words, no dictionary
dictionary = dictionary.none()
else:
# Build the query (cannot use __in because we want a case-insensitive lookup)
query = Q()
for word in words:
query |= Q(source__iexact=word)
# Filter dictionary
dictionary = dictionary.filter(query)
return render_to_response('js/dictionary.html', RequestContext(request, {
'dictionary': dictionary,
}))
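# Editor's note -- illustrative sketch only, not part of the original view. The
# case-insensitive OR query assembled above can be written equivalently with a
# reduce() over Q objects; Q is the same django.db.models.Q already used by this
# module, while the helper name below is the editor's invention.
def build_iexact_source_query(words):
    '''Return a Q object OR-ing case-insensitive matches for every word.'''
    import operator
    from functools import reduce
    from django.db.models import Q
    return reduce(operator.or_, (Q(source__iexact=word) for word in words), Q())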
@login_required
@permission_required('trans.ignore_check')
def ignore_check(request, check_id):
obj = get_object_or_404(Check, pk=int(check_id))
obj.project.check_acl(request)
# Mark check for ignoring
obj.ignore = True
obj.save()
# Invalidate caches
for unit in Unit.objects.filter(checksum=obj.checksum):
unit.translation.invalidate_cache()
# response for AJAX
return HttpResponse('ok')
@login_required
@permission_required('trans.upload_translation')
def upload_translation(request, project, subproject, lang):
'''
Handling of translation uploads.
'''
obj = get_object_or_404(
Translation,
language__code=lang,
subproject__slug=subproject,
subproject__project__slug=project,
enabled=True
)
obj.check_acl(request)
if not obj.is_locked(request) and request.method == 'POST':
if request.user.has_perm('trans.author_translation'):
form = ExtraUploadForm(request.POST, request.FILES)
elif request.user.has_perm('trans.overwrite_translation'):
form = UploadForm(request.POST, request.FILES)
else:
form = SimpleUploadForm(request.POST, request.FILES)
if form.is_valid():
if request.user.has_perm('trans.author_translation') and form.cleaned_data['author_name'] != '' and form.cleaned_data['author_email'] != '':
author = '%s <%s>' % (form.cleaned_data['author_name'], form.cleaned_data['author_email'])
else:
author = None
if request.user.has_perm('trans.overwrite_translation'):
overwrite = form.cleaned_data['overwrite']
else:
overwrite = False
try:
ret = obj.merge_upload(request, request.FILES['file'], overwrite, author, merge_header=form.cleaned_data['merge_header'])
if ret:
messages.info(request, _('File content successfully merged into translation.'))
else:
messages.info(request, _('There were no new strings in uploaded file.'))
except Exception as e:
messages.error(request, _('File content merge failed: %s') % unicode(e))
return HttpResponseRedirect(obj.get_absolute_url())
def not_found(request):
'''
Error handler showing list of available projects.
'''
template = loader.get_template('404.html')
return HttpResponseNotFound(
template.render(RequestContext(request, {
'request_path': request.path,
'title': _('Page Not Found'),
'projects': Project.objects.all_acl(request.user),
}
)))
# Cache this page for one month, it should not really change much
@cache_page(30 * 24 * 3600)
def js_config(request):
'''
Generates settings for JavaScript. Includes things like
API keys for translation services or the list of languages they
support.
'''
# Apertium support
if appsettings.MT_APERTIUM_KEY is not None and appsettings.MT_APERTIUM_KEY != '':
try:
listpairs = urllib2.urlopen('http://api.apertium.org/json/listPairs?key=%s' % appsettings.MT_APERTIUM_KEY)
pairs = listpairs.read()
parsed = json.loads(pairs)
apertium_langs = [p['targetLanguage'] for p in parsed['responseData'] if p['sourceLanguage'] == 'en']
except Exception as e:
logger.error('failed to get supported languages from Apertium, using defaults (%s)', str(e))
apertium_langs = ['gl', 'ca', 'es', 'eo']
else:
apertium_langs = None
# Microsoft translator support
if appsettings.MT_MICROSOFT_KEY is not None and appsettings.MT_MICROSOFT_KEY != '':
try:
listpairs = urllib2.urlopen('http://api.microsofttranslator.com/V2/Http.svc/GetLanguagesForTranslate?appID=%s' % appsettings.MT_MICROSOFT_KEY)
data = listpairs.read()
parsed = ElementTree.fromstring(data)
microsoft_langs = [p.text for p in parsed.getchildren()]
except Exception as e:
logger.error('failed to get supported languages from Microsoft, using defaults (%s)', str(e))
microsoft_langs = [
'ar', 'bg', 'ca', 'zh-CHS', 'zh-CHT', 'cs', 'da', 'nl', 'en',
'et', 'fi', 'fr', 'de', 'el', 'ht', 'he', 'hi', 'mww', 'hu',
'id', 'it', 'ja', 'ko', 'lv', 'lt', 'no', 'fa', 'pl', 'pt',
'ro', 'ru', 'sk', 'sl', 'es', 'sv', 'th', 'tr', 'uk', 'vi'
]
else:
microsoft_langs = None
return render_to_response('js/config.js', RequestContext(request, {
'apertium_langs': apertium_langs,
'microsoft_langs': microsoft_langs,
}),
mimetype = 'application/javascript')
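# Editor's note -- hypothetical refactoring sketch, not present in the original
# code: both service probes above follow the same "query the remote service, fall
# back to a static default list" pattern. The sketch relies only on urllib2 and
# logger, which this module already uses; the helper name and signature are the
# editor's invention.
def fetch_language_list(url, parse, defaults):
    '''Return parse(raw response) fetched from url, or defaults on any failure.'''
    try:
        handle = urllib2.urlopen(url)
        return parse(handle.read())
    except Exception as error:
        logger.error('failed to get supported languages from %s, using defaults (%s)', url, str(error))
        return defaults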
def about(request):
context = {}
versions = get_versions()
totals = Profile.objects.aggregate(Sum('translated'), Sum('suggested'))
total_strings = 0
for project in SubProject.objects.iterator():
try:
total_strings += project.translation_set.all()[0].total
except Translation.DoesNotExist:
pass
context['title'] = _('About Weblate')
context['total_translations'] = totals['translated__sum']
context['total_suggestions'] = totals['suggested__sum']
context['total_users'] = Profile.objects.count()
context['total_strings'] = total_strings
context['total_languages'] = Language.objects.filter(
translation__total__gt=0
).distinct().count()
context['total_checks'] = Check.objects.count()
context['ignored_checks'] = Check.objects.filter(ignore=True).count()
context['versions'] = versions
return render_to_response('about.html', RequestContext(request, context))
@user_passes_test(lambda u: u.has_perm('trans.commit_translation') or u.has_perm('trans.update_translation'))
def git_status_project(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
return render_to_response('js/git-status.html', RequestContext(request, {
'object': obj,
}))
@user_passes_test(lambda u: u.has_perm('trans.commit_translation') or u.has_perm('trans.update_translation'))
def git_status_subproject(request, project, subproject):
obj = get_object_or_404(SubProject, slug=subproject, project__slug=project)
obj.check_acl(request)
return render_to_response('js/git-status.html', RequestContext(request, {
'object': obj,
}))
@user_passes_test(lambda u: u.has_perm('trans.commit_translation') or u.has_perm('trans.update_translation'))
def git_status_translation(request, project, subproject, lang):
obj = get_object_or_404(Translation, language__code=lang, subproject__slug=subproject, subproject__project__slug=project, enabled=True)
obj.check_acl(request)
return render_to_response('js/git-status.html', RequestContext(request, {
'object': obj,
}))
def data_root(request):
site = Site.objects.get_current()
return render_to_response('data-root.html', RequestContext(request, {
'site_domain': site.domain,
'api_docs': weblate.get_doc_url('api', 'exports'),
'rss_docs': weblate.get_doc_url('api', 'rss'),
'projects': Project.objects.all_acl(request.user),
}))
def data_project(request, project):
obj = get_object_or_404(Project, slug=project)
obj.check_acl(request)
site = Site.objects.get_current()
return render_to_response('data.html', RequestContext(request, {
'object': obj,
'site_domain': site.domain,
'api_docs': weblate.get_doc_url('api', 'exports'),
'rss_docs': weblate.get_doc_url('api', 'rss'),
}))<|fim▁end|> | check = CHECKS[name]
except KeyError: |
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2016 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Tests for user models.
"""
from django.contrib.auth.models import User, Group
from django.test import TestCase
from weblate.accounts.models import AutoGroup
class AutoGroupTest(TestCase):
@staticmethod
def create_user():
return User.objects.create_user('test1', '[email protected]', 'pass')
def test_default(self):
user = self.create_user()
self.assertEqual(user.groups.count(), 1)
def test_none(self):
AutoGroup.objects.all().delete()<|fim▁hole|>
def test_matching(self):
AutoGroup.objects.create(
match='^.*@weblate.org',
group=Group.objects.get(name='Guests')
)
user = self.create_user()
self.assertEqual(user.groups.count(), 2)
def test_nonmatching(self):
AutoGroup.objects.create(
match='^.*@example.net',
group=Group.objects.get(name='Guests')
)
user = self.create_user()
self.assertEqual(user.groups.count(), 1)<|fim▁end|> | user = self.create_user()
self.assertEqual(user.groups.count(), 0) |
<|file_name|>network_api_demo.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import copy
import csv
import json
import os
from pkg_resources import resource_filename
from nupic.algorithms.anomaly import computeRawAnomalyScore
from nupic.data.file_record_stream import FileRecordStream
from nupic.engine import Network
from nupic.encoders import MultiEncoder, ScalarEncoder, DateEncoder
_VERBOSITY = 0 # how chatty the demo should be
_SEED = 1956 # the random seed used throughout
_INPUT_FILE_PATH = resource_filename(
"nupic.datafiles", "extra/hotgym/rec-center-hourly.csv"
)
_OUTPUT_PATH = "network-demo-output.csv"
_NUM_RECORDS = 2000
# Config field for SPRegion
SP_PARAMS = {
"spVerbosity": _VERBOSITY,
"spatialImp": "cpp",
"globalInhibition": 1,
"columnCount": 2048,
# This must be set before creating the SPRegion
"inputWidth": 0,
"numActiveColumnsPerInhArea": 40,
"seed": 1956,
"potentialPct": 0.8,
"synPermConnected": 0.1,
"synPermActiveInc": 0.0001,
"synPermInactiveDec": 0.0005,
"maxBoost": 1.0,
}
# Config field for TPRegion
TP_PARAMS = {
"verbosity": _VERBOSITY,
"columnCount": 2048,
"cellsPerColumn": 32,
"inputWidth": 2048,
"seed": 1960,
"temporalImp": "cpp",
"newSynapseCount": 20,
"maxSynapsesPerSegment": 32,
"maxSegmentsPerCell": 128,
"initialPerm": 0.21,
"permanenceInc": 0.1,
"permanenceDec": 0.1,
"globalDecay": 0.0,
"maxAge": 0,
"minThreshold": 9,
"activationThreshold": 12,
"outputType": "normal",
"pamLength": 3,
}
def createEncoder():
"""Create the encoder instance for our test and return it."""
consumption_encoder = ScalarEncoder(21, 0.0, 100.0, n=50, name="consumption",
clipInput=True)
time_encoder = DateEncoder(timeOfDay=(21, 9.5), name="timestamp_timeOfDay")
encoder = MultiEncoder()
encoder.addEncoder("consumption", consumption_encoder)
encoder.addEncoder("timestamp", time_encoder)
return encoder
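# Editor's note -- small illustrative helper, not part of the original demo. It
# only uses createEncoder() and getWidth(), the same call used further down when
# sizing the SPRegion input.
def printEncoderWidth():
  """Print the total bit width produced by the demo encoder."""
  encoder = createEncoder()
  print "Total encoder width: %d bits" % encoder.getWidth()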
def createNetwork(dataSource):
"""Create the Network instance.
The network has a sensor region reading data from `dataSource` and passing
the encoded representation to an SPRegion. The SPRegion output is passed to
a TPRegion.
:param dataSource: a RecordStream instance to get data from
:returns: a Network instance ready to run
"""
network = Network()
# Our input is sensor data from the gym file. The RecordSensor region
# allows us to specify a file record stream as the input source via the
# dataSource attribute.
network.addRegion("sensor", "py.RecordSensor",
json.dumps({"verbosity": _VERBOSITY}))
sensor = network.regions["sensor"].getSelf()
# The RecordSensor needs to know how to encode the input values
sensor.encoder = createEncoder()
# Specify the dataSource as a file record stream instance
sensor.dataSource = dataSource
# Create the spatial pooler region
SP_PARAMS["inputWidth"] = sensor.encoder.getWidth()
network.addRegion("spatialPoolerRegion", "py.SPRegion", json.dumps(SP_PARAMS))
# Link the SP region to the sensor input
network.link("sensor", "spatialPoolerRegion", "UniformLink", "")
network.link("sensor", "spatialPoolerRegion", "UniformLink", "",
srcOutput="resetOut", destInput="resetIn")
network.link("spatialPoolerRegion", "sensor", "UniformLink", "",
srcOutput="spatialTopDownOut", destInput="spatialTopDownIn")
network.link("spatialPoolerRegion", "sensor", "UniformLink", "",
srcOutput="temporalTopDownOut", destInput="temporalTopDownIn")
# Add the TPRegion on top of the SPRegion
network.addRegion("temporalPoolerRegion", "py.TPRegion",
json.dumps(TP_PARAMS))
network.link("spatialPoolerRegion", "temporalPoolerRegion", "UniformLink", "")
network.link("temporalPoolerRegion", "spatialPoolerRegion", "UniformLink", "",
srcOutput="topDownOut", destInput="topDownIn")
# Add the AnomalyRegion on top of the TPRegion
network.addRegion("anomalyRegion", "py.AnomalyRegion", json.dumps({}))
network.link("spatialPoolerRegion", "anomalyRegion", "UniformLink", "",
srcOutput="bottomUpOut", destInput="activeColumns")
network.link("temporalPoolerRegion", "anomalyRegion", "UniformLink", "",
srcOutput="topDownOut", destInput="predictedColumns")
network.initialize()
spatialPoolerRegion = network.regions["spatialPoolerRegion"]
# Make sure learning is enabled
spatialPoolerRegion.setParameter("learningMode", True)
# We want temporal anomalies so disable anomalyMode in the SP. This mode is
# used for computing anomalies in a non-temporal model.
spatialPoolerRegion.setParameter("anomalyMode", False)
temporalPoolerRegion = network.regions["temporalPoolerRegion"]
# Enable topDownMode to get the predicted columns output
temporalPoolerRegion.setParameter("topDownMode", True)
# Make sure learning is enabled (this is the default)
temporalPoolerRegion.setParameter("learningMode", True)
# Enable inference mode so we get predictions
temporalPoolerRegion.setParameter("inferenceMode", True)
# Enable anomalyMode to compute the anomaly score. This setting currently has
# no effect, so it does not matter; we instead compute the anomaly score from
# topDownOut (predicted columns) and the SP bottomUpOut (active columns).
temporalPoolerRegion.setParameter("anomalyMode", True)
return network
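# Editor's note -- summary of the data flow wired in createNetwork() above:
#   sensor -> spatialPoolerRegion -> temporalPoolerRegion
#   spatialPoolerRegion.bottomUpOut -> anomalyRegion.activeColumns
#   temporalPoolerRegion.topDownOut -> anomalyRegion.predictedColumns
# plus the top-down feedback links back into the sensor.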
<|fim▁hole|> """Run the network and write output to writer.
:param network: a Network instance to run
:param writer: a csv.writer instance to write output to
"""
sensorRegion = network.regions["sensor"]
spatialPoolerRegion = network.regions["spatialPoolerRegion"]
temporalPoolerRegion = network.regions["temporalPoolerRegion"]
anomalyRegion = network.regions["anomalyRegion"]
prevPredictedColumns = []
i = 0
for _ in xrange(_NUM_RECORDS):
# Run the network for a single iteration
network.run(1)
# Write out the anomaly score along with the record number and consumption
# value.
anomalyScore = anomalyRegion.getOutputData("rawAnomalyScore")[0]
consumption = sensorRegion.getOutputData("sourceOut")[0]
writer.writerow((i, consumption, anomalyScore))
i += 1
if __name__ == "__main__":
dataSource = FileRecordStream(streamID=_INPUT_FILE_PATH)
network = createNetwork(dataSource)
outputPath = os.path.join(os.path.dirname(__file__), _OUTPUT_PATH)
with open(outputPath, "w") as outputFile:
writer = csv.writer(outputFile)
print "Writing output to %s" % outputPath
runNetwork(network, writer)<|fim▁end|> |
def runNetwork(network, writer): |
<|file_name|>filecheckpoints.py<|end_file_name|><|fim▁begin|>"""
File-based Checkpoints implementations.
"""
import os
import shutil
from tornado.web import HTTPError
from .checkpoints import (
Checkpoints,
GenericCheckpointsMixin,
)
from .fileio import FileManagerMixin
from IPython.utils import tz
from IPython.utils.path import ensure_dir_exists
from IPython.utils.py3compat import getcwd
from IPython.utils.traitlets import Unicode
class FileCheckpoints(FileManagerMixin, Checkpoints):
"""
A Checkpoints that caches checkpoints for files in adjacent
directories.
Only works with FileContentsManager. Use GenericFileCheckpoints if
you want file-based checkpoints with another ContentsManager.
"""
checkpoint_dir = Unicode(
'.ipynb_checkpoints',
config=True,
help="""The directory name in which to keep file checkpoints
This is a path relative to the file's own directory.
By default, it is .ipynb_checkpoints
""",
)
root_dir = Unicode(config=True)
def _root_dir_default(self):
try:
return self.parent.root_dir
except AttributeError:
return getcwd()
# ContentsManager-dependent checkpoint API
def create_checkpoint(self, contents_mgr, path):
"""Create a checkpoint."""
checkpoint_id = u'checkpoint'
src_path = contents_mgr._get_os_path(path)
dest_path = self.checkpoint_path(checkpoint_id, path)
self._copy(src_path, dest_path)
return self.checkpoint_model(checkpoint_id, dest_path)
def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
"""Restore a checkpoint."""
src_path = self.checkpoint_path(checkpoint_id, path)
dest_path = contents_mgr._get_os_path(path)
self._copy(src_path, dest_path)
# ContentsManager-independent checkpoint API
def rename_checkpoint(self, checkpoint_id, old_path, new_path):
"""Rename a checkpoint from old_path to new_path."""
old_cp_path = self.checkpoint_path(checkpoint_id, old_path)
new_cp_path = self.checkpoint_path(checkpoint_id, new_path)
if os.path.isfile(old_cp_path):
self.log.debug(
"Renaming checkpoint %s -> %s",
old_cp_path,
new_cp_path,
)
with self.perm_to_403():
shutil.move(old_cp_path, new_cp_path)
def delete_checkpoint(self, checkpoint_id, path):
"""delete a file's checkpoint"""
path = path.strip('/')
cp_path = self.checkpoint_path(checkpoint_id, path)
if not os.path.isfile(cp_path):
self.no_such_checkpoint(path, checkpoint_id)
self.log.debug("unlinking %s", cp_path)
with self.perm_to_403():
os.unlink(cp_path)
def list_checkpoints(self, path):
"""list the checkpoints for a given file
This contents manager currently only supports one checkpoint per file.
"""
path = path.strip('/')
checkpoint_id = "checkpoint"
os_path = self.checkpoint_path(checkpoint_id, path)
if not os.path.isfile(os_path):
return []
else:
return [self.checkpoint_model(checkpoint_id, os_path)]
# Checkpoint-related utilities
def checkpoint_path(self, checkpoint_id, path):
"""find the path to a checkpoint"""
path = path.strip('/')
parent, name = ('/' + path).rsplit('/', 1)
parent = parent.strip('/')
basename, ext = os.path.splitext(name)
filename = u"{name}-{checkpoint_id}{ext}".format(
name=basename,
checkpoint_id=checkpoint_id,
ext=ext,
)
os_path = self._get_os_path(path=parent)
cp_dir = os.path.join(os_path, self.checkpoint_dir)
with self.perm_to_403():
ensure_dir_exists(cp_dir)
cp_path = os.path.join(cp_dir, filename)
return cp_path
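# Editor's note (illustrative): for path "work/analysis.ipynb" and the default
# checkpoint_dir, checkpoint_path() above resolves to
#   <root_dir>/work/.ipynb_checkpoints/analysis-checkpoint.ipynb
# i.e. the checkpoint sits next to the file, inside .ipynb_checkpoints.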
def checkpoint_model(self, checkpoint_id, os_path):
"""construct the info dict for a given checkpoint"""
stats = os.stat(os_path)
last_modified = tz.utcfromtimestamp(stats.st_mtime)
info = dict(
id=checkpoint_id,
last_modified=last_modified,
)
return info
# Error Handling
def no_such_checkpoint(self, path, checkpoint_id):
raise HTTPError(
404,<|fim▁hole|>
class GenericFileCheckpoints(GenericCheckpointsMixin, FileCheckpoints):
"""
Local filesystem Checkpoints that works with any conforming
ContentsManager.
"""
def create_file_checkpoint(self, content, format, path):
"""Create a checkpoint from the current content of a file."""
path = path.strip('/')
# only the one checkpoint ID:
checkpoint_id = u"checkpoint"
os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
self.log.debug("creating checkpoint for %s", path)
with self.perm_to_403():
self._save_file(os_checkpoint_path, content, format=format)
# return the checkpoint info
return self.checkpoint_model(checkpoint_id, os_checkpoint_path)
def create_notebook_checkpoint(self, nb, path):
"""Create a checkpoint from the current content of a notebook."""
path = path.strip('/')
# only the one checkpoint ID:
checkpoint_id = u"checkpoint"
os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
self.log.debug("creating checkpoint for %s", path)
with self.perm_to_403():
self._save_notebook(os_checkpoint_path, nb)
# return the checkpoint info
return self.checkpoint_model(checkpoint_id, os_checkpoint_path)
def get_notebook_checkpoint(self, checkpoint_id, path):
"""Get a checkpoint for a notebook."""
path = path.strip('/')
self.log.info("restoring %s from checkpoint %s", path, checkpoint_id)
os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
if not os.path.isfile(os_checkpoint_path):
self.no_such_checkpoint(path, checkpoint_id)
return {
'type': 'notebook',
'content': self._read_notebook(
os_checkpoint_path,
as_version=4,
),
}
def get_file_checkpoint(self, checkpoint_id, path):
"""Get a checkpoint for a file."""
path = path.strip('/')
self.log.info("restoring %s from checkpoint %s", path, checkpoint_id)
os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
if not os.path.isfile(os_checkpoint_path):
self.no_such_checkpoint(path, checkpoint_id)
content, format = self._read_file(os_checkpoint_path, format=None)
return {
'type': 'file',
'content': content,
'format': format,
}<|fim▁end|> | u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id)
)
|
<|file_name|>identity.py<|end_file_name|><|fim▁begin|>"""Identity related views."""
from reversion import revisions as reversion
from django.contrib.auth import mixins as auth_mixins
from django.contrib.auth.decorators import (
login_required, permission_required, user_passes_test
)
from django.shortcuts import render
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _, ungettext
from django.views import generic
from django.views.decorators.csrf import ensure_csrf_cookie
from modoboa.core.models import User
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.listing import get_listing_page, get_sort_order
from modoboa.lib.web_utils import render_to_json_response
from .. import signals<|fim▁hole|>
@login_required
@user_passes_test(
lambda u: u.has_perm("core.add_user") or
u.has_perm("admin.add_alias")
)
def _identities(request):
filters = {
fname: request.GET.get(fname, None)
for fname in ["searchquery", "idtfilter", "grpfilter"]
}
request.session["identities_filters"] = filters
idents_list = get_identities(request.user, **filters)
sort_order, sort_dir = get_sort_order(request.GET, "identity",
["identity", "name_or_rcpt", "tags"])
if sort_order in ["identity", "name_or_rcpt"]:
objects = sorted(idents_list, key=lambda o: getattr(o, sort_order),
reverse=sort_dir == "-")
else:
objects = sorted(idents_list, key=lambda o: o.tags[0]["label"],
reverse=sort_dir == "-")
context = {
"handle_mailboxes": request.localconfig.parameters.get_value(
"handle_mailboxes", raise_exception=False)
}
page = get_listing_page(objects, request.GET.get("page", 1))
context["headers"] = render_to_string(
"admin/identity_headers.html", {}, request)
if page is None:
context["length"] = 0
else:
context["rows"] = render_to_string(
"admin/identities_table.html", {
"identities": page.object_list
}, request
)
context["pages"] = [page.number]
return render_to_json_response(context)
@login_required
@permission_required("admin.add_mailbox")
def list_quotas(request):
from modoboa.lib.db_utils import db_type
sort_order, sort_dir = get_sort_order(request.GET, "address")
mboxes = Mailbox.objects.get_for_admin(
request.user, request.GET.get("searchquery", None)
)
mboxes = mboxes.exclude(quota=0)
if sort_order in ["address", "quota"]:
mboxes = mboxes.order_by("%s%s" % (sort_dir, sort_order))
elif sort_order in ("quota_value__bytes", "quota_usage"):
db_type = db_type()
if db_type == "mysql":
where = "CONCAT(admin_mailbox.address,'@',admin_domain.name)"
else:
where = "admin_mailbox.address||'@'||admin_domain.name"
if sort_order == "quota_value__bytes":
mboxes = mboxes.extra(
select={"quota_value__bytes": "admin_quota.bytes"},
where=["admin_quota.username=%s" % where],
tables=["admin_quota", "admin_domain"],
order_by=["%s%s" % (sort_dir, sort_order)]
)
else:
if db_type == "postgres":
select = (
"(admin_quota.bytes::float / (CAST(admin_mailbox.quota "
"AS BIGINT) * 1048576)) * 100"
)
else:
select = (
"(admin_quota.bytes * 1.0 / (admin_mailbox.quota "
"* 1048576)) * 100"
)
mboxes = mboxes.extra(
select={"quota_usage": select},
where=["admin_quota.username=%s" % where],
tables=["admin_quota", "admin_domain"],
order_by=["%s%s" % (sort_dir, sort_order)]
)
else:
raise BadRequest(_("Invalid request"))
page = get_listing_page(mboxes, request.GET.get("page", 1))
context = {
"headers": render_to_string(
"admin/identities_quota_headers.html", {}, request
)
}
if page is None:
context["length"] = 0
else:
context["rows"] = render_to_string(
"admin/identities_quotas.html", {"mboxes": page}, request
)
context["pages"] = [page.number]
return render_to_json_response(context)
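# Editor's note (illustration only): the `where` fragment built above differs per
# backend but evaluates to the same quota username. For mailbox "user" in domain
# "example.com":
#   MySQL:      CONCAT(admin_mailbox.address,'@',admin_domain.name)
#   PostgreSQL: admin_mailbox.address||'@'||admin_domain.name
# both yield "[email protected]", which is matched against admin_quota.username.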
@login_required
@user_passes_test(
lambda u: u.has_perm("core.add_user") or
u.has_perm("admin.add_alias") or
u.has_perm("admin.add_mailbox")
)
def get_next_page(request):
"""Return the next page of the identity list."""
if request.GET.get("objtype", "identity") == "identity":
return _identities(request)
return list_quotas(request)
@login_required
@user_passes_test(
lambda u: u.has_perm("core.add_user") or
u.has_perm("admin.add_alias")
)
@ensure_csrf_cookie
def identities(request, tplname="admin/identities.html"):
return render(request, tplname, {
"selection": "identities",
"deflocation": "list/"
})
@login_required
@permission_required("core.add_user")
def accounts_list(request):
accs = User.objects.filter(is_superuser=False) \
.exclude(groups__name="SimpleUsers")
res = [a.username for a in accs.all()]
return render_to_json_response(res)
@login_required
@permission_required("core.add_user")
@reversion.create_revision()
def newaccount(request):
"""Create a new account."""
return AccountWizard(request).process()
@login_required
@permission_required("core.change_user")
@reversion.create_revision()
def editaccount(request, pk):
account = User.objects.get(pk=pk)
if not request.user.can_access(account):
raise PermDeniedException
mb = account.mailbox if hasattr(account, "mailbox") else None
instances = {
"general": account, "profile": account, "mail": mb, "perms": account
}
results = signals.get_account_form_instances.send(
sender="editaccount", user=request.user, account=account)
for result in results:
instances.update(result[1])
return AccountForm(request, instances=instances).process()
@login_required
@permission_required("core.delete_user")
def delaccount(request, pk):
User.objects.get(pk=pk).delete()
return render_to_json_response(
ungettext("Account deleted", "Accounts deleted", 1)
)
@login_required
@permission_required("admin.add_domain")
def remove_permission(request):
domid = request.GET.get("domid", None)
daid = request.GET.get("daid", None)
if domid is None or daid is None:
raise BadRequest(_("Invalid request"))
try:
account = User.objects.get(pk=daid)
domain = Domain.objects.get(pk=domid)
except (User.DoesNotExist, Domain.DoesNotExist):
raise BadRequest(_("Invalid request"))
if not request.user.can_access(account) or \
not request.user.can_access(domain):
raise PermDeniedException
domain.remove_admin(account)
return render_to_json_response({})
class AccountDetailView(
auth_mixins.PermissionRequiredMixin, generic.DetailView):
"""DetailView for Account."""
model = User
permission_required = "core.add_user"
template_name = "admin/account_detail.html"
def has_permission(self):
"""Check object-level access."""
result = super(AccountDetailView, self).has_permission()
if not result:
return result
return self.request.user.can_access(self.get_object())
def get_context_data(self, **kwargs):
"""Add information to context."""
context = super(AccountDetailView, self).get_context_data(**kwargs)
del context["user"]
result = signals.extra_account_dashboard_widgets.send(
self.__class__, user=self.request.user, account=self.object)
context["templates"] = {"left": [], "right": []}
for _receiver, widgets in result:
for widget in widgets:
context["templates"][widget["column"]].append(
widget["template"])
context.update(widget["context"])
if self.object.role in ["Resellers", "DomainAdmins"]:
context["domains"] = Domain.objects.get_for_admin(self.object)
context["selection"] = "identities"
return context<|fim▁end|> | from ..forms import AccountForm, AccountWizard
from ..lib import get_identities
from ..models import Domain, Mailbox |
<|file_name|>data_message.rs<|end_file_name|><|fim▁begin|>use super::record_content::RecordData;
use super::record_header::RecordHeader;
use super::record_datum::RecordDatum;
#[derive(Debug)]
pub struct DataMessage {
header: RecordHeader,
data: Vec<RecordDatum>
}
impl DataMessage {
pub fn new(raw_header: RecordHeader) -> DataMessage {
DataMessage {
header: raw_header,
data: Vec::new()
}
}
pub fn push_datum(&mut self, d: RecordDatum) {
self.data.push(d);
}
}
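// Editor's note -- illustrative usage of the API above (header and datum values
// come from the record modules imported at the top; the names are placeholders):
//   let mut msg = DataMessage::new(header);
//   msg.push_datum(datum);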
impl RecordData for DataMessage {
fn get_header(&self) -> RecordHeader {<|fim▁hole|><|fim▁end|> | return self.header.clone();
}
} |
<|file_name|>my_etree.py<|end_file_name|><|fim▁begin|><|fim▁hole|>"""
__all__ = ('etree', )
try:
from lxml import etree
except ImportError:
try:
import xml.etree.cElementTree as etree
except (ImportError, SystemError):
import xml.etree.ElementTree as etree<|fim▁end|> | """
Module with universal etree module |
<|file_name|>mvn_from_bijector_test.py<|end_file_name|><|fim▁begin|># Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `mvn_from_bijector.py`."""
from typing import Tuple
from absl.testing import absltest
from absl.testing import parameterized
import chex
from distrax._src.bijectors import bijector
from distrax._src.bijectors.diag_affine import DiagAffine
from distrax._src.bijectors.triangular_affine import TriangularAffine
from distrax._src.distributions.mvn_from_bijector import MultivariateNormalFromBijector
import haiku as hk
import jax
import jax.numpy as jnp
import numpy as np
from tensorflow_probability.substrates import jax as tfp
tfd = tfp.distributions
Array = chex.Array
class DummyBijector(bijector.Bijector):
"""A dummy bijector."""
def forward_and_log_det(self, x: Array) -> Tuple[Array, Array]:
"""Computes y = f(x) and log|det J(f)(x)|."""
return x, jnp.zeros_like(x)[:-1]
class MultivariateNormalFromBijectorTest(parameterized.TestCase):
@parameterized.named_parameters(
('wrong event_ndims_in', 2, 1, True, np.zeros((4,))),
('wrong event_ndims_out', 1, 2, True, np.zeros((4,))),
('not constant Jacobian', 1, 1, False, np.zeros((4,))),
('loc is 0d', 1, 1, True, np.zeros(shape=())),
('loc has more dims than batch_shape', 1, 1, True,
np.zeros(shape=(2, 4))),
)
def test_raises_on_wrong_inputs(
self, event_ndims_in, event_ndims_out, is_constant_jacobian, loc):
bij = DummyBijector(event_ndims_in, event_ndims_out, is_constant_jacobian)
with self.assertRaises(ValueError):
MultivariateNormalFromBijector(loc, bij, batch_shape=())
@parameterized.named_parameters(
('no broadcast', np.ones((4,)), np.zeros((4,)), (4,)),
('broadcasted loc', np.ones((3, 4)), np.zeros((4,)), (3, 4)),
('broadcasted diag', np.ones((4,)), np.zeros((3, 4)), (3, 4)),
)
def test_loc_scale_and_shapes(self, diag, loc, expected_shape):
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
batch_shape = jnp.broadcast_shapes(diag.shape, loc.shape)[:-1]
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
np.testing.assert_allclose(dist.loc, np.zeros(expected_shape))
self.assertTrue(scale.same_as(dist.scale))
self.assertEqual(dist.event_shape, (4,))
self.assertEqual(dist.batch_shape, batch_shape)
@chex.all_variants
def test_sample(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
diag = 0.5 + jax.random.uniform(next(prng), (4,))
loc = jax.random.normal(next(prng), (4,))
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
dist = MultivariateNormalFromBijector(loc, scale, batch_shape=())
num_samples = 100_000
sample_fn = lambda seed: dist.sample(seed=seed, sample_shape=num_samples)
samples = self.variant(sample_fn)(jax.random.PRNGKey(2000))
self.assertEqual(samples.shape, (num_samples, 4))
np.testing.assert_allclose(jnp.mean(samples, axis=0), loc, rtol=0.1)
np.testing.assert_allclose(jnp.std(samples, axis=0), diag, rtol=0.1)
@chex.all_variants
def test_log_prob(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
diag = 0.5 + jax.random.uniform(next(prng), (4,))
loc = jax.random.normal(next(prng), (4,))
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
dist = MultivariateNormalFromBijector(loc, scale, batch_shape=())
values = jax.random.normal(next(prng), (5, 4))
tfp_dist = tfd.MultivariateNormalDiag(loc=loc, scale_diag=diag)
np.testing.assert_allclose(
self.variant(dist.log_prob)(values), tfp_dist.log_prob(values))
@chex.all_variants(with_pmap=False)
@parameterized.named_parameters(
('no broadcast', (4,), (4,)),
('broadcasted loc', (3, 4), (4,)),
('broadcasted diag', (4,), (3, 4)),
)
def test_mean_median_mode(self, diag_shape, loc_shape):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
diag = jax.random.normal(next(prng), diag_shape)
loc = jax.random.normal(next(prng), loc_shape)
scale = DiagAffine(diag=diag, bias=jnp.zeros_like(diag))
batch_shape = jnp.broadcast_shapes(diag_shape, loc_shape)[:-1]
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
for method in ['mean', 'median', 'mode']:
with self.subTest(method=method):
fn = self.variant(getattr(dist, method))
np.testing.assert_allclose(
fn(), jnp.broadcast_to(loc, batch_shape + loc.shape[-1:]))
@chex.all_variants(with_pmap=False)
@parameterized.named_parameters(
('no broadcast', (4,), (4,)),
('broadcasted loc', (3, 4), (4,)),
('broadcasted diag', (4,), (3, 4)),
)
def test_variance_stddev_covariance_diag(self, scale_shape, loc_shape):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_diag = jax.random.normal(next(prng), scale_shape)
loc = jax.random.normal(next(prng), loc_shape)
scale = DiagAffine(diag=scale_diag, bias=jnp.zeros_like(scale_diag))
batch_shape = jnp.broadcast_shapes(scale_shape[:-1], loc_shape[:-1])
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
for method in ['variance', 'stddev', 'covariance']:
with self.subTest(method=method):
fn = self.variant(getattr(dist, method))
if method == 'variance':
expected_result = jnp.broadcast_to(
jnp.square(scale_diag), batch_shape + loc.shape[-1:])
elif method == 'stddev':
expected_result = jnp.broadcast_to(
jnp.abs(scale_diag), batch_shape + loc.shape[-1:])
elif method == 'covariance':
expected_result = jnp.broadcast_to(
jnp.vectorize(jnp.diag, signature='(k)->(k,k)')(
jnp.square(scale_diag)),
batch_shape + loc.shape[-1:] + loc.shape[-1:])
np.testing.assert_allclose(fn(), expected_result, rtol=5e-3)
@chex.all_variants(with_pmap=False)
@parameterized.named_parameters(
('no broadcast', (4, 4), (4,)),
('broadcasted loc', (3, 4, 4), (4,)),
('broadcasted diag', (4, 4), (3, 4)),
)
def test_variance_stddev_covariance_no_diag(self, scale_shape, loc_shape):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_tril = jnp.tril(jax.random.normal(next(prng), scale_shape))
loc = jax.random.normal(next(prng), loc_shape)
scale = TriangularAffine(
matrix=scale_tril,
bias=jnp.zeros_like(scale_tril[..., 0]),
is_lower=True)
batch_shape = jnp.broadcast_shapes(scale_shape[:-2], loc_shape[:-1])
dist = MultivariateNormalFromBijector(loc, scale, batch_shape)
for method in ['variance', 'stddev', 'covariance']:
with self.subTest(method=method):
fn = self.variant(getattr(dist, method))
scale_tril_t = jnp.vectorize(
jnp.transpose, signature='(k,k)->(k,k)')(scale_tril)
scale_times_scale_t = jnp.matmul(scale_tril, scale_tril_t)
if method == 'variance':
expected_result = jnp.vectorize(jnp.diag, signature='(k,k)->(k)')(
scale_times_scale_t)
expected_result = jnp.broadcast_to(
expected_result, batch_shape + loc.shape[-1:])
elif method == 'stddev':
expected_result = jnp.vectorize(jnp.diag, signature='(k,k)->(k)')(
jnp.sqrt(scale_times_scale_t))
expected_result = jnp.broadcast_to(
expected_result, batch_shape + loc.shape[-1:])
elif method == 'covariance':
expected_result = jnp.broadcast_to(
scale_times_scale_t, batch_shape + scale_tril.shape[-2:])
np.testing.assert_allclose(fn(), expected_result, rtol=5e-3)
@chex.all_variants(with_pmap=False)
def test_kl_divergence_diag_distributions(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_diag1 = 0.1 + jax.random.uniform(next(prng), (3, 4))
loc1 = jax.random.normal(next(prng), (1, 4))
dist1_distrax = MultivariateNormalFromBijector(
loc=loc1,
scale=DiagAffine(diag=scale_diag1, bias=jnp.zeros((4,))),
batch_shape=(3,),
)
dist1_tfp = tfd.MultivariateNormalDiag(
loc=loc1, scale_diag=scale_diag1)
scale_diag2 = 0.1 + jax.random.uniform(next(prng), (4,))
loc2 = jax.random.normal(next(prng), (4,))
dist2_distrax = MultivariateNormalFromBijector(<|fim▁hole|> )
dist2_tfp = tfd.MultivariateNormalDiag(
loc=loc2, scale_diag=scale_diag2)
expected_result1 = dist1_tfp.kl_divergence(dist2_tfp)
expected_result2 = dist2_tfp.kl_divergence(dist1_tfp)
for mode in ['distrax_to_distrax', 'distrax_to_tfp', 'tfp_to_distrax']:
with self.subTest(mode=mode):
if mode == 'distrax_to_distrax':
result1 = self.variant(dist1_distrax.kl_divergence)(dist2_distrax)
result2 = self.variant(dist2_distrax.kl_divergence)(dist1_distrax)
elif mode == 'distrax_to_tfp':
result1 = self.variant(dist1_distrax.kl_divergence)(dist2_tfp)
result2 = self.variant(dist2_distrax.kl_divergence)(dist1_tfp)
elif mode == 'tfp_to_distrax':
result1 = self.variant(dist1_tfp.kl_divergence)(dist2_distrax)
result2 = self.variant(dist2_tfp.kl_divergence)(dist1_distrax)
np.testing.assert_allclose(result1, expected_result1, rtol=1e-3)
np.testing.assert_allclose(result2, expected_result2, rtol=1e-3)
@chex.all_variants(with_pmap=False)
def test_kl_divergence_non_diag_distributions(self):
prng = hk.PRNGSequence(jax.random.PRNGKey(42))
scale_tril1 = jnp.tril(jax.random.normal(next(prng), (3, 4, 4)))
loc1 = jax.random.normal(next(prng), (1, 4))
dist1_distrax = MultivariateNormalFromBijector(
loc=loc1,
scale=TriangularAffine(matrix=scale_tril1, bias=jnp.zeros((4,))),
batch_shape=(3,),
)
dist1_tfp = tfd.MultivariateNormalTriL(loc=loc1, scale_tril=scale_tril1)
scale_tril2 = jnp.tril(jax.random.normal(next(prng), (4, 4)))
loc2 = jax.random.normal(next(prng), (4,))
dist2_distrax = MultivariateNormalFromBijector(
loc=loc2,
scale=TriangularAffine(matrix=scale_tril2, bias=jnp.zeros((4,))),
batch_shape=(),
)
dist2_tfp = tfd.MultivariateNormalTriL(loc=loc2, scale_tril=scale_tril2)
expected_result1 = dist1_tfp.kl_divergence(dist2_tfp)
expected_result2 = dist2_tfp.kl_divergence(dist1_tfp)
for mode in ['distrax_to_distrax', 'distrax_to_tfp', 'tfp_to_distrax']:
with self.subTest(mode=mode):
if mode == 'distrax_to_distrax':
result1 = self.variant(dist1_distrax.kl_divergence)(dist2_distrax)
result2 = self.variant(dist2_distrax.kl_divergence)(dist1_distrax)
elif mode == 'distrax_to_tfp':
result1 = self.variant(dist1_distrax.kl_divergence)(dist2_tfp)
result2 = self.variant(dist2_distrax.kl_divergence)(dist1_tfp)
elif mode == 'tfp_to_distrax':
result1 = self.variant(dist1_tfp.kl_divergence)(dist2_distrax)
result2 = self.variant(dist2_tfp.kl_divergence)(dist1_distrax)
np.testing.assert_allclose(result1, expected_result1, rtol=1e-3)
np.testing.assert_allclose(result2, expected_result2, rtol=1e-3)
def test_kl_divergence_raises_on_incompatible_distributions(self):
dim = 4
dist1 = MultivariateNormalFromBijector(
loc=jnp.zeros((dim,)),
scale=DiagAffine(diag=jnp.ones((dim,)), bias=jnp.zeros((dim,))),
batch_shape=(),
)
dim = 5
dist2 = MultivariateNormalFromBijector(
loc=jnp.zeros((dim,)),
scale=DiagAffine(diag=jnp.ones((dim,)), bias=jnp.zeros((dim,))),
batch_shape=(),
)
with self.assertRaises(ValueError):
dist1.kl_divergence(dist2)
if __name__ == '__main__':
absltest.main()<|fim▁end|> | loc=loc2,
scale=DiagAffine(diag=scale_diag2, bias=jnp.zeros((4,))),
batch_shape=(), |
<|file_name|>icheck.js<|end_file_name|><|fim▁begin|>/*!
* iCheck v1.0.1, http://git.io/arlzeA
* ===================================
* Powerful jQuery and Zepto plugin for checkboxes and radio buttons customization
*
* (c) 2013 Damir Sultanov, http://fronteed.com
* MIT Licensed
*/
(function($) {
// Cached vars
var _iCheck = 'iCheck',
_iCheckHelper = _iCheck + '-helper',
_checkbox = 'checkbox',
_radio = 'radio',
_checked = 'checked',
_unchecked = 'un' + _checked,
_disabled = 'disabled',
_determinate = 'determinate',
_indeterminate = 'in' + _determinate,
_update = 'update',
_type = 'type',
_click = 'click',
_touch = 'touchbegin.i touchend.i',
_add = 'addClass',
_remove = 'removeClass',
_callback = 'trigger',
_label = 'label',
_cursor = 'cursor',
_mobile = /ipad|iphone|ipod|android|blackberry|windows phone|opera mini|silk/i.test(navigator.userAgent);
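// Editor's note -- typical usage, inferred from the options and methods handled
// below (not part of the upstream source):
//   $('input').iCheck({checkboxClass: 'icheckbox', radioClass: 'iradio', increaseArea: '20%'});
//   $('input').iCheck('check');   // also: uncheck|toggle|indeterminate|determinate|
//                                 //       disable|enable|update|destroy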
// Plugin init
$.fn[_iCheck] = function(options, fire) {
// Walker
var handle = 'input[type="' + _checkbox + '"], input[type="' + _radio + '"]',
stack = $(),
walker = function(object) {
object.each(function() {
var self = $(this);
if (self.is(handle)) {
stack = stack.add(self);
} else {
stack = stack.add(self.find(handle));
};
});
};
// Check if we should operate with some method
if (/^(check|uncheck|toggle|indeterminate|determinate|disable|enable|update|destroy)$/i.test(options)) {
// Normalize method's name
options = options.toLowerCase();
// Find checkboxes and radio buttons
walker(this);
return stack.each(function() {
var self = $(this);
if (options == 'destroy') {
tidy(self, 'ifDestroyed');
} else {
operate(self, true, options);
};
// Fire method's callback
if ($.isFunction(fire)) {
fire();
};
});
// Customization
} else if (typeof options == 'object' || !options) {
// Check if any options were passed
var settings = $.extend({
checkedClass: _checked,
disabledClass: _disabled,
indeterminateClass: _indeterminate,
labelHover: true,
aria: false
}, options),
selector = settings.handle,
hoverClass = settings.hoverClass || 'hover',
focusClass = settings.focusClass || 'focus',
activeClass = settings.activeClass || 'active',
labelHover = !!settings.labelHover,
labelHoverClass = settings.labelHoverClass || 'hover',
// Setup clickable area
area = ('' + settings.increaseArea).replace('%', '') | 0;
// Selector limit
if (selector == _checkbox || selector == _radio) {
handle = 'input[type="' + selector + '"]';
};
// Clickable area limit
if (area < -50) {
area = -50;
};
// Walk around the selector
walker(this);
return stack.each(function() {
var self = $(this);
// If already customized
tidy(self);
var node = this,
id = node.id,
// Layer styles
offset = -area + '%',
size = 100 + (area * 2) + '%',
layer = {
position: 'absolute',
top: offset,
left: offset,
display: 'block',
width: size,
height: size,
margin: 0,
padding: 0,
background: '#fff',
border: 0,
opacity: 0
},
// Choose how to hide input
hide = _mobile ? {
position: 'absolute',
visibility: 'hidden'
} : area ? layer : {
position: 'absolute',
opacity: 0
},
// Get proper class
className = node[_type] == _checkbox ? settings.checkboxClass || 'i' + _checkbox : settings.radioClass || 'i' + _radio,
// Find assigned labels
label = $(_label + '[for="' + id + '"]').add(self.closest(_label)),
// Check ARIA option
aria = !!settings.aria,
// Set ARIA placeholder
ariaID = _iCheck + '-' + Math.random().toString(36).substr(2,6),
// Parent & helper
parent = '<div class="' + className + '" ' + (aria ? 'role="' + node[_type] + '" ' : ''),
helper;
// Set ARIA "labelledby"
if (aria) {
label.each(function() {
parent += 'aria-labelledby="';
if (this.id) {
parent += this.id;
} else {
this.id = ariaID;
parent += ariaID;
}
parent += '"';
});
};
// Wrap input
parent = self.wrap(parent + '/>')[_callback]('ifCreated').parent().append(settings.insert);
// Layer addition
helper = $('<ins class="' + _iCheckHelper + '"/>').css(layer).appendTo(parent);
// Finalize customization
self.data(_iCheck, {o: settings, s: self.attr('style')}).css(hide);
!!settings.inheritClass && parent[_add](node.className || '');
!!settings.inheritID && id && parent.attr('id', _iCheck + '-' + id);
parent.css('position') == 'static' && parent.css('position', 'relative');
operate(self, true, _update);
// Label events
if (label.length) {
label.on(_click + '.i mouseover.i mouseout.i ' + _touch, function(event) {
var type = event[_type],
item = $(this);
// Do nothing if input is disabled
if (!node[_disabled]) {
// Click
if (type == _click) {
if ($(event.target).is('a')) {
return;
}
operate(self, false, true);
// Hover state
} else if (labelHover) {
// mouseout|touchend
if (/ut|nd/.test(type)) {
parent[_remove](hoverClass);
item[_remove](labelHoverClass);
} else {
parent[_add](hoverClass);
item[_add](labelHoverClass);
};
};
if (_mobile) {
event.stopPropagation();
} else {
return false;
};
};
});
};
// Input events
self.on(_click + '.i focus.i blur.i keyup.i keydown.i keypress.i', function(event) {
var type = event[_type],
key = event.keyCode;
// Click
if (type == _click) {
return false;
// Keydown
} else if (type == 'keydown' && key == 32) {
if (!(node[_type] == _radio && node[_checked])) {
if (node[_checked]) {
off(self, _checked);
} else {
on(self, _checked);
};
};
return false;
// Keyup
} else if (type == 'keyup' && node[_type] == _radio) {
!node[_checked] && on(self, _checked);
// Focus/blur
} else if (/us|ur/.test(type)) {
parent[type == 'blur' ? _remove : _add](focusClass);
};
});
// Helper events
helper.on(_click + ' mousedown mouseup mouseover mouseout ' + _touch, function(event) {
var type = event[_type],
// mousedown|mouseup
toggle = /wn|up/.test(type) ? activeClass : hoverClass;
// Do nothing if input is disabled
if (!node[_disabled]) {
// Click
if (type == _click) {
operate(self, false, true);
// Active and hover states
} else {
// State is on
if (/wn|er|in/.test(type)) {
// mousedown|mouseover|touchbegin
parent[_add](toggle);
// State is off
} else {
parent[_remove](toggle + ' ' + activeClass);
};
// Label hover
if (label.length && labelHover && toggle == hoverClass) {
// mouseout|touchend
label[/ut|nd/.test(type) ? _remove : _add](labelHoverClass);
};
};
if (_mobile) {
event.stopPropagation();
} else {
return false;
};
};
});
});
} else {
return this;
};
};
// Do something with inputs
function operate(input, direct, method) {
var node = input[0],
state = /er/.test(method) ? _indeterminate : /bl/.test(method) ? _disabled : _checked,
active = method == _update ? {
checked: node[_checked],
disabled: node[_disabled],
indeterminate: input.attr(_indeterminate) == 'true' || input.attr(_determinate) == 'false'
} : node[state];
// Check, disable or indeterminate
if (/^(ch|di|in)/.test(method) && !active) {
on(input, state);
// Uncheck, enable or determinate
} else if (/^(un|en|de)/.test(method) && active) {
off(input, state);
// Update
} else if (method == _update) {
// Handle states
for (var state in active) {
if (active[state]) {
on(input, state, true);
} else {
off(input, state, true);
};
};
} else if (!direct || method == 'toggle') {
// Helper or label was clicked
if (!direct) {
input[_callback]('ifClicked');
};
// Toggle checked state
if (active) {
if (node[_type] !== _radio) {
off(input, state);
};
} else {
on(input, state);
};
};
};
// Add checked, disabled or indeterminate state
function on(input, state, keep) {
var node = input[0],
parent = input.parent(),
checked = state == _checked,
indeterminate = state == _indeterminate,
disabled = state == _disabled,
callback = indeterminate ? _determinate : checked ? _unchecked : 'enabled',
regular = option(input, callback + capitalize(node[_type])),
specific = option(input, state + capitalize(node[_type]));
// Prevent unnecessary actions
if (node[state] !== true) {
// Toggle assigned radio buttons
if (!keep && state == _checked && node[_type] == _radio && node.name) {
var form = input.closest('form'),
inputs = 'input[name="' + node.name + '"]';
inputs = form.length ? form.find(inputs) : $(inputs);
inputs.each(function() {
if (this !== node && $(this).data(_iCheck)) {
off($(this), state);
};
});
};
// Indeterminate state
if (indeterminate) {
// Add indeterminate state
node[state] = true;
// Remove checked state
if (node[_checked]) {
off(input, _checked, 'force');
};
// Checked or disabled state
} else {
// Add checked or disabled state
if (!keep) {
node[state] = true;
};
// Remove indeterminate state
if (checked && node[_indeterminate]) {
off(input, _indeterminate, false);
};
};
// Trigger callbacks
callbacks(input, checked, state, keep);
};
// Add proper cursor
if (node[_disabled] && !!option(input, _cursor, true)) {
parent.find('.' + _iCheckHelper).css(_cursor, 'default');
};
// Add state class
parent[_add](specific || option(input, state) || '');
// Set ARIA attribute
disabled ? parent.attr('aria-disabled', 'true') : parent.attr('aria-checked', indeterminate ? 'mixed' : 'true');
// Remove regular state class
parent[_remove](regular || option(input, callback) || '');
};
// Remove checked, disabled or indeterminate state
function off(input, state, keep) {
var node = input[0],
parent = input.parent(),
checked = state == _checked,
indeterminate = state == _indeterminate,
disabled = state == _disabled,
callback = indeterminate ? _determinate : checked ? _unchecked : 'enabled',
regular = option(input, callback + capitalize(node[_type])),
specific = option(input, state + capitalize(node[_type]));
// Prevent unnecessary actions
if (node[state] !== false) {
// Toggle state
if (indeterminate || !keep || keep == 'force') {
node[state] = false;
};
// Trigger callbacks
callbacks(input, checked, callback, keep);
};
// Add proper cursor
if (!node[_disabled] && !!option(input, _cursor, true)) {
parent.find('.' + _iCheckHelper).css(_cursor, 'pointer');
};
// Remove state class
parent[_remove](specific || option(input, state) || '');
// Set ARIA attribute
disabled ? parent.attr('aria-disabled', 'false') : parent.attr('aria-checked', 'false');
// Add regular state class
parent[_add](regular || option(input, callback) || '');
};
// Remove all traces
function tidy(input, callback) {
if (input.data(_iCheck)) {<|fim▁hole|> input.parent().html(input.attr('style', input.data(_iCheck).s || ''));
// Callback
if (callback) {
input[_callback](callback);
};
// Unbind events
input.off('.i').unwrap();
$(_label + '[for="' + input[0].id + '"]').add(input.closest(_label)).off('.i');
};
};
// Get some option
function option(input, state, regular) {
if (input.data(_iCheck)) {
return input.data(_iCheck).o[state + (regular ? '' : 'Class')];
};
};
// Capitalize some string
function capitalize(string) {
return string.charAt(0).toUpperCase() + string.slice(1);
};
// Executable handlers
function callbacks(input, checked, callback, keep) {
if (!keep) {
if (checked) {
input[_callback]('ifToggled');
};
input[_callback]('ifChanged')[_callback]('if' + capitalize(callback));
};
};
})(window.jQuery || window.Zepto);<|fim▁end|> |
// Remove everything except input |
<|file_name|>PrintStatus.py<|end_file_name|><|fim▁begin|># encoding: utf-8<|fim▁hole|># module gtk._gtk
# from /usr/lib/python2.7/dist-packages/gtk-2.0/gtk/_gtk.so
# by generator 1.135
# no doc
# imports
import atk as __atk
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
class PrintStatus(__gobject.GEnum):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
__dict__ = None # (!) real value is ''
__enum_values__ = {
0: 0,
1: 1,
2: 2,
3: 3,
4: 4,
5: 5,
6: 6,
7: 7,
8: 8,
}
__gtype__ = None # (!) real value is ''<|fim▁end|> | |
<|file_name|>detector.go<|end_file_name|><|fim▁begin|>package light
import (
"bytes"
"context"
"errors"
"fmt"
"time"
"github.com/tendermint/tendermint/light/provider"
"github.com/tendermint/tendermint/types"
)
// The detector component of the light client detects and handles attacks on the light client.
// More info here:
// tendermint/docs/architecture/adr-047-handling-evidence-from-light-client.md
// detectDivergence is a second wall of defense for the light client.
//
// It takes the target verified header and compares it with the headers of a set of
// witness providers that the light client is connected to. If a conflicting header
// is returned it verifies and examines the conflicting header against the verified
// trace that was produced from the primary. If successful, it produces two sets of evidence
// and sends them to the opposite provider before halting.
//
// If there are no conflicting headers, the light client deems the verified target header
// trusted and saves it to the trusted store.
func (c *Client) detectDivergence(ctx context.Context, primaryTrace []*types.LightBlock, now time.Time) error {
c.providerMutex.Lock()
defer c.providerMutex.Unlock()
if len(c.witnesses) < 1 {
return nil
}
if primaryTrace == nil || len(primaryTrace) < 2 {
return errors.New("nil or single block primary trace")
}
var (
headerMatched bool
lastVerifiedHeader = primaryTrace[len(primaryTrace)-1].SignedHeader
witnessesToRemove = make([]int, 0)
)
c.logger.Debug("running detector against trace", "finalizeBlockHeight", lastVerifiedHeader.Height,
"finalizeBlockHash", lastVerifiedHeader.Hash, "length", len(primaryTrace))
// launch one goroutine per witness to retrieve the light block of the target height
// and compare it with the header from the primary
errc := make(chan error, len(c.witnesses))
for i, witness := range c.witnesses {
go c.compareNewHeaderWithWitness(ctx, errc, lastVerifiedHeader, witness, i)
}
// handle errors from the header comparisons as they come in
for i := 0; i < cap(errc); i++ {
err := <-errc
switch e := err.(type) {
case nil: // at least one header matched
headerMatched = true
case errConflictingHeaders:
// We have conflicting headers. This could possibly imply an attack on the light client.
// First we need to verify the witness's header using the same skipping verification and then we
// need to find the point that the headers diverge and examine this for any evidence of an attack.
//
// We combine these actions together, verifying the witnesses headers and outputting the trace<|fim▁hole|> // return information of the attack
return err
}
// if attempt to generate conflicting headers failed then remove witness
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
case errBadWitness:
c.logger.Info("witness returned an error during header comparison, removing...",
"witness", c.witnesses[e.WitnessIndex], "err", err)
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
default:
if errors.Is(e, context.Canceled) || errors.Is(e, context.DeadlineExceeded) {
return e
}
c.logger.Info("error in light block request to witness", "err", err)
}
}
// remove witnesses that have misbehaved
if err := c.removeWitnesses(witnessesToRemove); err != nil {
return err
}
// 1. If we had at least one witness that returned the same header then we
// conclude that we can trust the header
if headerMatched {
return nil
}
// 2. Else all witnesses have either not responded, don't have the block or sent invalid blocks.
return ErrFailedHeaderCrossReferencing
}
// compareNewHeaderWithWitness takes the verified header from the primary and compares it with a
// header from a specified witness. The function can return one of three errors:
//
// 1: errConflictingHeaders -> there may have been an attack on this light client
// 2: errBadWitness -> the witness has either not responded, doesn't have the header or has given us an invalid one
// Note: In the case of an invalid header we remove the witness
// 3: nil -> the hashes of the two headers match
func (c *Client) compareNewHeaderWithWitness(ctx context.Context, errc chan error, h *types.SignedHeader,
witness provider.Provider, witnessIndex int) {
lightBlock, err := c.getLightBlock(ctx, witness, h.Height)
switch err {
// no error means we move on to checking the hash of the two headers
case nil:
break
// the witness hasn't been helpful in comparing headers, we mark the response and continue
// comparing with the rest of the witnesses
case provider.ErrNoResponse, provider.ErrLightBlockNotFound, context.DeadlineExceeded, context.Canceled:
errc <- err
return
// the witness' head of the blockchain is lower than the height of the primary. This could be one of
// two things:
// 1) The witness is lagging behind
// 2) The primary may be performing a lunatic attack with a height and time in the future
case provider.ErrHeightTooHigh:
// The light client now asks for the latest header that the witness has
var isTargetHeight bool
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
if c.providerShouldBeRemoved(err) {
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
} else {
errc <- err
}
return
}
// if the witness caught up and has returned a block of the target height then we can
// break from this switch case and continue to verify the hashes
if isTargetHeight {
break
}
// witness' last header is below the primary's header. We check the times to see if the blocks
// have conflicting times
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// the witness is behind. We wait for a period WAITING = 2 * DRIFT + LAG.
// This should give the witness ample time if it is a participating member
// of consensus to produce a block that has a time that is after the primary's
// block time. If not the witness is too far behind and the light client removes it
time.Sleep(2*c.maxClockDrift + c.maxBlockLag)
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
if c.providerShouldBeRemoved(err) {
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
} else {
errc <- err
}
return
}
if isTargetHeight {
break
}
// the witness still doesn't have a block at the height of the primary.
// Check if there is a conflicting time
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// Following this request response procedure, the witness has been unable to produce a block
// that can somehow conflict with the primary's block. We thus conclude that the witness
// is too far behind and thus we return a no response error.
//
// NOTE: If the clock drift / lag has been miscalibrated it is feasible that the light client has
// drifted too far ahead for any witness to be able to provide a comparable block and thus may allow
// for a malicious primary to attack it
errc <- provider.ErrNoResponse
return
default:
// all other errors (i.e. invalid block, closed connection or unreliable provider) we mark the
// witness as bad and remove it
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
return
}
if !bytes.Equal(h.Header.Hash(), lightBlock.Header.Hash()) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
}
c.logger.Debug("matching header received by witness", "height", h.Height, "witness", witnessIndex)
errc <- nil
}
// sendEvidence sends evidence to a provider on a best effort basis.
func (c *Client) sendEvidence(ctx context.Context, ev *types.LightClientAttackEvidence, receiver provider.Provider) {
err := receiver.ReportEvidence(ctx, ev)
if err != nil {
c.logger.Error("failed to report evidence to provider", "ev", ev, "provider", receiver)
}
}
// handleConflictingHeaders handles the primary style of attack, which is where a primary and witness have
// two headers of the same height but with different hashes
func (c *Client) handleConflictingHeaders(
ctx context.Context,
primaryTrace []*types.LightBlock,
challengingBlock *types.LightBlock,
witnessIndex int,
now time.Time,
) error {
supportingWitness := c.witnesses[witnessIndex]
witnessTrace, primaryBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
primaryTrace,
challengingBlock,
supportingWitness,
now,
)
if err != nil {
c.logger.Info("error validating witness's divergent header", "witness", supportingWitness, "err", err)
return nil
}
// We are suspecting that the primary is faulty, hence we hold the witness as the source of truth
// and generate evidence against the primary that we can send to the witness
commonBlock, trustedBlock := witnessTrace[0], witnessTrace[len(witnessTrace)-1]
evidenceAgainstPrimary := newLightClientAttackEvidence(primaryBlock, trustedBlock, commonBlock)
c.logger.Error("ATTEMPTED ATTACK DETECTED. Sending evidence againt primary by witness", "ev", evidenceAgainstPrimary,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstPrimary, supportingWitness)
if primaryBlock.Commit.Round != witnessTrace[len(witnessTrace)-1].Commit.Round {
c.logger.Info("The light client has detected, and prevented, an attempted amnesia attack." +
" We think this attack is pretty unlikely, so if you see it, that's interesting to us." +
" Can you let us know by opening an issue through https://github.com/tendermint/tendermint/issues/new?")
}
// This may not be valid because the witness itself is at fault. So now we reverse it, examining the
// trace provided by the witness and holding the primary as the source of truth. Note: primary may not
// respond but this is okay as we will halt anyway.
primaryTrace, witnessBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
witnessTrace,
primaryBlock,
c.primary,
now,
)
if err != nil {
c.logger.Info("error validating primary's divergent header", "primary", c.primary, "err", err)
return ErrLightClientAttack
}
// We now use the primary trace to create evidence against the witness and send it to the primary
commonBlock, trustedBlock = primaryTrace[0], primaryTrace[len(primaryTrace)-1]
evidenceAgainstWitness := newLightClientAttackEvidence(witnessBlock, trustedBlock, commonBlock)
c.logger.Error("Sending evidence against witness by primary", "ev", evidenceAgainstWitness,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstWitness, c.primary)
// We return the error and don't process anymore witnesses
return ErrLightClientAttack
}
// examineConflictingHeaderAgainstTrace takes a trace from one provider and a divergent header that
// it has received from another and performs verifySkipping at the heights of each of the intermediate
// headers in the trace until it reaches the divergentHeader. 1 of 2 things can happen.
//
// 1. The light client verifies a header that is different to the intermediate header in the trace. This
// is the bifurcation point and the light client can create evidence from it
// 2. The source stops responding, doesn't have the block or sends an invalid header in which case we
// return the error and remove the witness
//
// CONTRACT:
// 1. Trace can not be empty len(trace) > 0
// 2. The last block in the trace can not be of a lower height than the target block
// trace[len(trace)-1].Height >= targetBlock.Height
// 3. The
func (c *Client) examineConflictingHeaderAgainstTrace(
ctx context.Context,
trace []*types.LightBlock,
targetBlock *types.LightBlock,
source provider.Provider, now time.Time,
) ([]*types.LightBlock, *types.LightBlock, error) {
var (
previouslyVerifiedBlock, sourceBlock *types.LightBlock
sourceTrace []*types.LightBlock
err error
)
if targetBlock.Height < trace[0].Height {
return nil, nil, fmt.Errorf("target block has a height lower than the trusted height (%d < %d)",
targetBlock.Height, trace[0].Height)
}
for idx, traceBlock := range trace {
// this case only happens in a forward lunatic attack. We treat the block with the
// height directly after the targetBlock as the divergent block
if traceBlock.Height > targetBlock.Height {
// sanity check that the time of the traceBlock is indeed less than that of the targetBlock. If the trace
// was correctly verified we should expect monotonically increasing time. This means that if the block at
// the end of the trace has a lesser time than the target block then all blocks in the trace should have a
// lesser time
if traceBlock.Time.After(targetBlock.Time) {
return nil, nil,
errors.New("sanity check failed: expected traceblock to have a lesser time than the target block")
}
// before sending back the divergent block and trace we need to ensure we have verified
// the final gap between the previouslyVerifiedBlock and the targetBlock
if previouslyVerifiedBlock.Height != targetBlock.Height {
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, targetBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
}
return sourceTrace, traceBlock, nil
}
// get the corresponding block from the source to verify and match up against the traceBlock
if traceBlock.Height == targetBlock.Height {
sourceBlock = targetBlock
} else {
sourceBlock, err = c.getLightBlock(ctx, source, traceBlock.Height)
if err != nil {
return nil, nil, fmt.Errorf("failed to examine trace: %w", err)
}
}
// The first block in the trace MUST be the same as the light block that the source produces
// else we cannot continue with verification.
if idx == 0 {
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
return nil, nil, fmt.Errorf("trusted block is different to the source's first block (%X = %X)",
thash, shash)
}
previouslyVerifiedBlock = sourceBlock
continue
}
// we check that the source provider can verify a block at the same height as the
// intermediate header in the trace
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, sourceBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
// check if the headers verified by the source has diverged from the trace
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
// Bifurcation point found!
return sourceTrace, traceBlock, nil
}
// headers are still the same. update the previouslyVerifiedBlock
previouslyVerifiedBlock = sourceBlock
}
// We have reached the end of the trace. This should never happen. This can only happen if one of the stated
// prerequisites to this function were not met. Namely that either trace[len(trace)-1].Height < targetBlock.Height
// or that trace[i].Hash() != targetBlock.Hash()
return nil, nil, errNoDivergence
}
// getTargetBlockOrLatest gets the latest light block. If the latest height is greater than the target
// height it queries the target height, otherwise it returns the latest block. It returns true if it
// successfully managed to acquire the block at the target height.
func (c *Client) getTargetBlockOrLatest(
ctx context.Context,
height int64,
witness provider.Provider,
) (bool, *types.LightBlock, error) {
lightBlock, err := c.getLightBlock(ctx, witness, 0)
if err != nil {
return false, nil, err
}
if lightBlock.Height == height {
// the witness has caught up to the height of the provider's signed header. We
// can resume with checking the hashes.
return true, lightBlock, nil
}
if lightBlock.Height > height {
// the witness has caught up. We recursively call the function again. However, in order
// to avoid a wild goose chase where the witness sends us one header below and one header
// above the height, we set a timeout on the context
lightBlock, err := c.getLightBlock(ctx, witness, height)
return true, lightBlock, err
}
return false, lightBlock, nil
}
// newLightClientAttackEvidence determines the type of attack and then forms the evidence filling out
// all the fields such that it is ready to be sent to a full node.
func newLightClientAttackEvidence(conflicted, trusted, common *types.LightBlock) *types.LightClientAttackEvidence {
ev := &types.LightClientAttackEvidence{ConflictingBlock: conflicted}
// We use the common height to indicate the form of the attack.
// if this is an equivocation or amnesia attack, i.e. the validator sets are the same, then we
// return the height of the conflicting block as the common height. If instead it is a lunatic
// attack and the validator sets are not the same then we send the height of the common header.
if ev.ConflictingHeaderIsInvalid(trusted.Header) {
ev.CommonHeight = common.Height
ev.Timestamp = common.Time
ev.TotalVotingPower = common.ValidatorSet.TotalVotingPower()
} else {
ev.CommonHeight = trusted.Height
ev.Timestamp = trusted.Time
ev.TotalVotingPower = trusted.ValidatorSet.TotalVotingPower()
}
ev.ByzantineValidators = ev.GetByzantineValidators(common.ValidatorSet, trusted.SignedHeader)
return ev
}<|fim▁end|> | // which captures the bifurcation point and if successful provides the information to create valid evidence.
err := c.handleConflictingHeaders(ctx, primaryTrace, e.Block, e.WitnessIndex, now)
if err != nil { |
<|file_name|>053.py<|end_file_name|><|fim▁begin|># 053. Maximum Subarray
# The simple O(n) solution.
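# Note: one way to read the loop below is as a Kadane-style scan --
# `pre` holds a running sum of the subarray ending at the current element
# (restarted once extending it can no longer help), while `ret` tracks the
# best sum seen so far. E.g. for [-2,1,-3,4,-1,2,1,-5,4] (see the unit test
# below) the best window is [4,-1,2,1], giving 6.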
import unittest
class Solution(object):
def maxSubArray(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
ret = nums[0]
pre = nums[0]
for i in nums[1:]:
if ret < i and ret < 0:
ret = pre = i
continue
cur = pre + i
if ret < cur:
ret = pre = cur
continue
if cur >= 0:
pre = cur
continue
# if cur < 0: # Better start over.
pre = 0
return ret
class SolutionUnitTest(unittest.TestCase):<|fim▁hole|> def testMaxSubArray(self):
s = Solution()
self.assertEqual(s.maxSubArray([-2,1,-3,4,-1,2,1,-5,4]), 6)
self.assertEqual(s.maxSubArray([-2,1]), 1)
self.assertEqual(s.maxSubArray([-1]), -1)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | def setup(self):
pass
def tearDown(self):
pass |
<|file_name|>jack_utils.rs<|end_file_name|><|fim▁begin|>use jack_sys as j;
use std::ffi;
use libc;
/// Collects strings from an array of c-strings into a Rust vector of strings
/// and frees the memory pointed to by `ptr`. The end of the array is marked by
/// the value of the c-string being the null pointer. `ptr` may be `null`, in
/// which case nothing (deallocating) is done and an empty vector is returned.
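///
/// # Safety
///
/// `ptr` must either be null or point to a NULL-terminated array of valid
/// C strings that were allocated by JACK, since the array is released here
/// via `jack_sys::jack_free`.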
pub unsafe fn collect_strs(ptr: *const *const libc::c_char) -> Vec<String> {
if ptr.is_null() {
return Vec::new();
};
let len = {
let mut len = 0;
while !(*ptr.offset(len)).is_null() {
len += 1;<|fim▁hole|> len
};
let mut strs = Vec::with_capacity(len as usize);
for i in 0..len {
let cstr_ptr = *ptr.offset(i);
let s = ffi::CStr::from_ptr(cstr_ptr).to_string_lossy().into_owned();
strs.push(s);
}
j::jack_free(ptr as *mut ::libc::c_void);
strs
}<|fim▁end|> | } |
<|file_name|>CellularAutomaton2d.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2015 Lukasz Hryniuk
This file is part of toys.
toys is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
toys is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.<|fim▁hole|>
#include "CellularAutomaton2d.hpp"
#include "../utils/Cell2d.hpp"
CellularAutomaton2d::CellularAutomaton2d(state_t number_of_states,
int width,
int height)
: CellularAutomaton(number_of_states),
width_{width},
height_{height},
states_{std::vector<std::vector<state_t>>(height, std::vector<state_t>(width, 0))}
{ }
void CellularAutomaton2d::Reset()
{
states_.clear();
states_ = std::vector<std::vector<state_t>>(height_, std::vector<state_t>(width_, 0));
}
std::vector<std::unique_ptr<Atom>> CellularAutomaton2d::GetInitialState() const
{
std::vector<std::unique_ptr<Atom>> atoms;
for (auto y = 0u; y < unsigned(height_); ++y) {
for (auto x = 0u; x < unsigned(width_); ++x) {
if (states_[y][x] != 0) {
atoms.push_back(std::make_unique<Cell2d>(x, y, states_[y][x]));
}
}
}
return atoms;
}
int CellularAutomaton2d::GetNumberOfAtomsWithState(state_t state) const
{
int nbr_of_atoms{};
for (const auto& row : states_) {
for (const auto& s : row) {
if (s == state) {
++nbr_of_atoms;
}
}
}
return nbr_of_atoms;
}
state_t CellularAutomaton2d::GetStateAt(int x, int y)
{
return states_[y][x];
}
void CellularAutomaton2d::SetStateAt(int x, int y, state_t new_state)
{
states_[y][x] = new_state;
}
std::vector<std::unique_ptr<Atom>> CellularAutomaton2d::Step(int)
{
return std::vector<std::unique_ptr<Atom>>{};
}
std::vector<std::unique_ptr<Atom>> CellularAutomaton2d::GetAllAtoms() const {
std::vector<std::unique_ptr<Atom>> atoms;
for (auto y = 0u; y < unsigned(height_); ++y) {
for (auto x = 0u; x < unsigned(width_); ++x) {
atoms.push_back(std::make_unique<Cell2d>(x, y, states_[y][x]));
}
}
return atoms;
}<|fim▁end|> |
You should have received a copy of the GNU General Public License
along with toys. If not, see <http://www.gnu.org/licenses/>.
*/ |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
A sub-package for efficiently dealing with polynomials.
Within the documentation for this sub-package, a "finite power series,"
i.e., a polynomial (also referred to simply as a "series") is represented
by a 1-D numpy array of the polynomial's coefficients, ordered from lowest
order term to highest. For example, array([1,2,3]) represents
``P_0 + 2*P_1 + 3*P_2``, where P_n is the n-th order basis polynomial
applicable to the specific module in question, e.g., `polynomial` (which
"wraps" the "standard" basis) or `chebyshev`. For optimal performance,
all operations on polynomials, including evaluation at an argument, are
implemented as operations on the coefficients. Additional (module-specific)
information can be found in the docstring for the module of interest.
This package provides *convenience classes* for each of six different kinds
of polynomials:
======================== ================
**Name** **Provides**
======================== ================
`~polynomial.Polynomial` Power series
`~chebyshev.Chebyshev` Chebyshev series
`~legendre.Legendre` Legendre series
`~laguerre.Laguerre` Laguerre series
`~hermite.Hermite` Hermite series
`~hermite_e.HermiteE` HermiteE series
======================== ================
These *convenience classes* provide a consistent interface for creating,
manipulating, and fitting data with polynomials of different bases.
The convenience classes are the preferred interface for the `~numpy.polynomial`
package, and are available from the ``numpy.polynomial`` namespace.
This eliminates the need to navigate to the corresponding submodules, e.g.
``np.polynomial.Polynomial`` or ``np.polynomial.Chebyshev`` instead of<|fim▁hole|>The classes provide a more consistent and concise interface than the
type-specific functions defined in the submodules for each type of polynomial.
For example, to fit a Chebyshev polynomial with degree ``1`` to data given
by arrays ``xdata`` and ``ydata``, the
`~chebyshev.Chebyshev.fit` class method::
>>> from numpy.polynomial import Chebyshev
>>> c = Chebyshev.fit(xdata, ydata, deg=1)
is preferred over the `chebyshev.chebfit` function from the
``np.polynomial.chebyshev`` module::
>>> from numpy.polynomial.chebyshev import chebfit
>>> c = chebfit(xdata, ydata, deg=1)
See :doc:`routines.polynomials.classes` for more details.
Convenience Classes
===================
The following lists the various constants and methods common to all of
the classes representing the various kinds of polynomials. In the following,
the term ``Poly`` represents any one of the convenience classes (e.g.
`~polynomial.Polynomial`, `~chebyshev.Chebyshev`, `~hermite.Hermite`, etc.)
while the lowercase ``p`` represents an **instance** of a polynomial class.
Constants
---------
- ``Poly.domain`` -- Default domain
- ``Poly.window`` -- Default window
- ``Poly.basis_name`` -- String used to represent the basis
- ``Poly.maxpower`` -- Maximum value ``n`` such that ``p**n`` is allowed
- ``Poly.nickname`` -- String used in printing
Creation
--------
Methods for creating polynomial instances.
- ``Poly.basis(degree)`` -- Basis polynomial of given degree
- ``Poly.identity()`` -- ``p`` where ``p(x) = x`` for all ``x``
- ``Poly.fit(x, y, deg)`` -- ``p`` of degree ``deg`` with coefficients
determined by the least-squares fit to the data ``x``, ``y``
- ``Poly.fromroots(roots)`` -- ``p`` with specified roots
- ``p.copy()`` -- Create a copy of ``p``
Conversion
----------
Methods for converting a polynomial instance of one kind to another.
- ``p.cast(Poly)`` -- Convert ``p`` to instance of kind ``Poly``
- ``p.convert(Poly)`` -- Convert ``p`` to instance of kind ``Poly`` or map
between ``domain`` and ``window``
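For example, to re-express a power series in the Chebyshev basis
(an illustrative snippet)::
    >>> from numpy.polynomial import Polynomial, Chebyshev
    >>> p = Polynomial([1, 2, 3])
    >>> c = p.convert(kind=Chebyshev)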
Calculus
--------
- ``p.deriv()`` -- Take the derivative of ``p``
- ``p.integ()`` -- Integrate ``p``
Validation
----------
- ``Poly.has_samecoef(p1, p2)`` -- Check if coefficients match
- ``Poly.has_samedomain(p1, p2)`` -- Check if domains match
- ``Poly.has_sametype(p1, p2)`` -- Check if types match
- ``Poly.has_samewindow(p1, p2)`` -- Check if windows match
Misc
----
- ``p.linspace()`` -- Return ``x, p(x)`` at equally-spaced points in ``domain``
- ``p.mapparms()`` -- Return the parameters for the linear mapping between
``domain`` and ``window``.
- ``p.roots()`` -- Return the roots of `p`.
- ``p.trim()`` -- Remove trailing coefficients.
- ``p.cutdeg(degree)`` -- Truncate p to given degree
- ``p.truncate(size)`` -- Truncate p to given size
"""
from .polynomial import Polynomial
from .chebyshev import Chebyshev
from .legendre import Legendre
from .hermite import Hermite
from .hermite_e import HermiteE
from .laguerre import Laguerre
__all__ = [
"set_default_printstyle",
"polynomial", "Polynomial",
"chebyshev", "Chebyshev",
"legendre", "Legendre",
"hermite", "Hermite",
"hermite_e", "HermiteE",
"laguerre", "Laguerre",
]
def set_default_printstyle(style):
"""
Set the default format for the string representation of polynomials.
Values for ``style`` must be valid inputs to ``__format__``, i.e. 'ascii'
or 'unicode'.
Parameters
----------
style : str
Format string for default printing style. Must be either 'ascii' or
'unicode'.
Notes
-----
The default format depends on the platform: 'unicode' is used on
Unix-based systems and 'ascii' on Windows. This determination is based on
default font support for the unicode superscript and subscript ranges.
Examples
--------
>>> p = np.polynomial.Polynomial([1, 2, 3])
>>> c = np.polynomial.Chebyshev([1, 2, 3])
>>> np.polynomial.set_default_printstyle('unicode')
>>> print(p)
1.0 + 2.0·x¹ + 3.0·x²
>>> print(c)
1.0 + 2.0·T₁(x) + 3.0·T₂(x)
>>> np.polynomial.set_default_printstyle('ascii')
>>> print(p)
1.0 + 2.0 x**1 + 3.0 x**2
>>> print(c)
1.0 + 2.0 T_1(x) + 3.0 T_2(x)
>>> # Formatting supersedes all class/package-level defaults
>>> print(f"{p:unicode}")
1.0 + 2.0·x¹ + 3.0·x²
"""
if style not in ('unicode', 'ascii'):
raise ValueError(
f"Unsupported format string '{style}'. Valid options are 'ascii' "
f"and 'unicode'"
)
_use_unicode = True
if style == 'ascii':
_use_unicode = False
from ._polybase import ABCPolyBase
ABCPolyBase._use_unicode = _use_unicode
from numpy._pytesttester import PytestTester
test = PytestTester(__name__)
del PytestTester<|fim▁end|> | ``np.polynomial.polynomial.Polynomial`` or
``np.polynomial.chebyshev.Chebyshev``, respectively. |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponseRedirect, JsonResponse
from django.views.generic import CreateView, UpdateView
from django.contrib.messages.views import SuccessMessageMixin
from .models import HistoriaClinica, Patologia
from .forms import HistoriaClinicaForms
from apps.afiliados.models import Titular, Adherente
from apps.personas.models import Persona
class HistoriaClinicaCreate(SuccessMessageMixin, CreateView):
model = HistoriaClinica
form_class = HistoriaClinicaForms
template_name = 'historias_clinicas/historia_clinica_form.html'
success_url = '/historia/clinica/alta/'
success_message = 'La historia clínica se guardo con exito'
def form_valid(self, form):
form.instance.persona = Persona.objects.get(pk=self.kwargs['pk'])
self.success_url = '/historia/clinica/redireccion/%s' % str(Persona.objects.get(pk=self.kwargs['pk']).id)
return super(HistoriaClinicaCreate, self).form_valid(form)
def get_context_data(self, **kwargs):
context = super(HistoriaClinicaCreate, self).get_context_data(**kwargs)
context['persona'] = Persona.objects.get(pk=self.kwargs['pk'])
return context
def redireccion(request, id):
titular = Titular.objects.filter(id=id)
if titular.exists():
persona = Persona.objects.filter(titular=titular)
historia_clinica = HistoriaClinica.objects.filter(persona=persona)
if historia_clinica.exists():
return HttpResponseRedirect('/historia/clinica/modi/' + str(historia_clinica[0].id))
else:
return HttpResponseRedirect('/historia/clinica/alta/' + str(persona[0].id))
else:
adherente = Adherente.objects.filter(id=id)
if adherente.exists():
persona = Persona.objects.filter(adherente=adherente)
historia_clinica = HistoriaClinica.objects.filter(persona=persona)<|fim▁hole|> else:
return HttpResponseRedirect('/historia/clinica/alta/' + str(persona[0].id))
class HistoriaClinicaUpdate(SuccessMessageMixin, UpdateView):
model = HistoriaClinica
form_class = HistoriaClinicaForms
template_name = 'historias_clinicas/historia_clinica_form.html'
success_url = '/historia/clinica/alta/'
success_message = 'La historia clínica se guardo con exito'
def form_valid(self, form):
form.instance.persona = Persona.objects.get(pk=HistoriaClinica.objects.get(pk=self.kwargs['pk']).persona.id)
self.success_url = '/historia/clinica/redireccion/%s' % str(HistoriaClinica.objects.get(
pk=self.kwargs['pk']).persona.id)
return super(HistoriaClinicaUpdate, self).form_valid(form)
def get_context_data(self, **kwargs):
context = super(HistoriaClinicaUpdate, self).get_context_data(**kwargs)
context['persona'] = Persona.objects.get(pk=HistoriaClinica.objects.get(pk=self.kwargs['pk']).persona.id)
return context
# Ajax ######
def patologia_create_ajax(request):
if request.method == 'POST':
if request.is_ajax():
id = ''
patologia = Patologia(nombre_enfermedad=request.POST.get('id_nombre_enfermedad_patologia_ajax'),
fecha_deteccion=request.POST.get('id_fecha_deteccion_patologia_ajax'))
patologia.save()
patologia = Patologia.objects.filter(
nombre_enfermedad=request.POST.get('id_nombre_enfermedad_patologia_ajax'),
fecha_deteccion=request.POST.get('id_fecha_deteccion_patologia_ajax'))
for indice in patologia.values('id'):
for valor in indice:
id = indice[valor]
for indice in patologia.values('nombre_enfermedad'):
for valor in indice:
nombre_enfermedad = indice[valor]
return JsonResponse({'id': id, 'nombre_enfermedad': nombre_enfermedad})<|fim▁end|> | if historia_clinica.exists():
return HttpResponseRedirect('/historia/clinica/modi/' + str(historia_clinica[0].id)) |
<|file_name|>0008_auto_20151023_1317.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('mainapp', '0007_auto_20151023_1012'),<|fim▁hole|>
operations = [
migrations.AddField(
model_name='documentclassification',
name='created_at',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AddField(
model_name='documentclassification',
name='ip',
field=models.CharField(default=b'', max_length=100),
),
]<|fim▁end|> | ] |
<|file_name|>midi.rs<|end_file_name|><|fim▁begin|>#![feature(unboxed_closures)]
extern crate synthrs;
use synthrs::synthesizer::{ make_samples_from_midi, quantize_samples };
use synthrs::writer::write_wav;
fn main() {
write_wav("out/octave.wav", 44100,
quantize_samples::<i16>(
make_samples_from_midi(44100, "examples/assets/octave.mid")
)
).ok().expect("failed");
write_wav("out/seikilos.wav", 44100,
quantize_samples::<i16>(
make_samples_from_midi(44100, "examples/assets/seikilos.mid")
)
).ok().expect("failed");
write_wav("out/danube.wav", 44100,<|fim▁hole|> make_samples_from_midi(44100, "examples/assets/danube.mid")
)
).ok().expect("failed");
write_wav("out/mountainking.wav", 44100,
quantize_samples::<i16>(
make_samples_from_midi(44100, "examples/assets/mountainking.mid")
)
).ok().expect("failed");
write_wav("out/rustle.wav", 44100,
quantize_samples::<i16>(
make_samples_from_midi(44100, "examples/assets/rustle.mid")
)
).ok().expect("failed");
}<|fim▁end|> | quantize_samples::<i16>( |
<|file_name|>redux-saga-effects.dev.cjs.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var is = require('@redux-saga/is');
var __chunk_1 = require('./chunk-5caa0f1a.js');
var __chunk_2 = require('./chunk-062c0282.js');
require('@babel/runtime/helpers/extends');
require('@redux-saga/symbols');
require('@redux-saga/delay-p');
var done = function done(value) {
return {
done: true,
value: value
};
};
var qEnd = {};
function safeName(patternOrChannel) {
if (is.channel(patternOrChannel)) {
return 'channel';
}
if (is.stringableFunc(patternOrChannel)) {
return String(patternOrChannel);
}
if (is.func(patternOrChannel)) {
return patternOrChannel.name;
}
return String(patternOrChannel);
}
function fsmIterator(fsm, startState, name) {
var stateUpdater,
errorState,
effect,
nextState = startState;
function next(arg, error) {
if (nextState === qEnd) {
return done(arg);
}
if (error && !errorState) {
nextState = qEnd;
throw error;
} else {
stateUpdater && stateUpdater(arg);
var currentState = error ? fsm[errorState](error) : fsm[nextState]();
nextState = currentState.nextState;
effect = currentState.effect;
stateUpdater = currentState.stateUpdater;
errorState = currentState.errorState;
return nextState === qEnd ? done(arg) : effect;
}
}
return __chunk_1.makeIterator(next, function (error) {
return next(null, error);
}, name);
}
function takeEvery(patternOrChannel, worker) {
for (var _len = arguments.length, args = new Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
args[_key - 2] = arguments[_key];
}
var yTake = {
done: false,
value: __chunk_2.take(patternOrChannel)
};
var yFork = function yFork(ac) {
return {
done: false,
value: __chunk_2.fork.apply(void 0, [worker].concat(args, [ac]))
};
};
var action,
setAction = function setAction(ac) {
return action = ac;
};
return fsmIterator({
q1: function q1() {
return {
nextState: 'q2',
effect: yTake,
stateUpdater: setAction
};
},
q2: function q2() {
return {
nextState: 'q1',
effect: yFork(action)
};
}
}, 'q1', "takeEvery(" + safeName(patternOrChannel) + ", " + worker.name + ")");
}
function takeLatest(patternOrChannel, worker) {
for (var _len = arguments.length, args = new Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
args[_key - 2] = arguments[_key];
}
var yTake = {
done: false,
value: __chunk_2.take(patternOrChannel)
};
var yFork = function yFork(ac) {
return {
done: false,
value: __chunk_2.fork.apply(void 0, [worker].concat(args, [ac]))
};
};
var yCancel = function yCancel(task) {
return {
done: false,
value: __chunk_2.cancel(task)
};
};
var task, action;
var setTask = function setTask(t) {
return task = t;
};
var setAction = function setAction(ac) {
return action = ac;
};
return fsmIterator({
q1: function q1() {
return {
nextState: 'q2',
effect: yTake,
stateUpdater: setAction
};
},
q2: function q2() {
return task ? {
nextState: 'q3',
effect: yCancel(task)
} : {
nextState: 'q1',
effect: yFork(action),
stateUpdater: setTask
};
},
q3: function q3() {
return {
nextState: 'q1',
effect: yFork(action),
stateUpdater: setTask
};
}
}, 'q1', "takeLatest(" + safeName(patternOrChannel) + ", " + worker.name + ")");
}
function takeLeading(patternOrChannel, worker) {
for (var _len = arguments.length, args = new Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
args[_key - 2] = arguments[_key];
}
var yTake = {
done: false,
value: __chunk_2.take(patternOrChannel)
};
var yCall = function yCall(ac) {
return {
done: false,
value: __chunk_2.call.apply(void 0, [worker].concat(args, [ac]))
};
};
var action;
var setAction = function setAction(ac) {
return action = ac;
};
return fsmIterator({
q1: function q1() {
return {
nextState: 'q2',
effect: yTake,
stateUpdater: setAction
};
},
q2: function q2() {
return {
nextState: 'q1',
effect: yCall(action)
};
}
}, 'q1', "takeLeading(" + safeName(patternOrChannel) + ", " + worker.name + ")");
}
function throttle(delayLength, pattern, worker) {
for (var _len = arguments.length, args = new Array(_len > 3 ? _len - 3 : 0), _key = 3; _key < _len; _key++) {
args[_key - 3] = arguments[_key];
}
var action, channel;
var yActionChannel = {
done: false,
value: __chunk_2.actionChannel(pattern, __chunk_2.sliding(1))
};
var yTake = function yTake() {
return {
done: false,
value: __chunk_2.take(channel)
};
};<|fim▁hole|>
var yFork = function yFork(ac) {
return {
done: false,
value: __chunk_2.fork.apply(void 0, [worker].concat(args, [ac]))
};
};
var yDelay = {
done: false,
value: __chunk_2.delay(delayLength)
};
var setAction = function setAction(ac) {
return action = ac;
};
var setChannel = function setChannel(ch) {
return channel = ch;
};
return fsmIterator({
q1: function q1() {
return {
nextState: 'q2',
effect: yActionChannel,
stateUpdater: setChannel
};
},
q2: function q2() {
return {
nextState: 'q3',
effect: yTake(),
stateUpdater: setAction
};
},
q3: function q3() {
return {
nextState: 'q4',
effect: yFork(action)
};
},
q4: function q4() {
return {
nextState: 'q2',
effect: yDelay
};
}
}, 'q1', "throttle(" + safeName(pattern) + ", " + worker.name + ")");
}
function retry(maxTries, delayLength, fn) {
var counter = maxTries;
for (var _len = arguments.length, args = new Array(_len > 3 ? _len - 3 : 0), _key = 3; _key < _len; _key++) {
args[_key - 3] = arguments[_key];
}
var yCall = {
done: false,
value: __chunk_2.call.apply(void 0, [fn].concat(args))
};
var yDelay = {
done: false,
value: __chunk_2.delay(delayLength)
};
return fsmIterator({
q1: function q1() {
return {
nextState: 'q2',
effect: yCall,
errorState: 'q10'
};
},
q2: function q2() {
return {
nextState: qEnd
};
},
q10: function q10(error) {
counter -= 1;
if (counter <= 0) {
throw error;
}
return {
nextState: 'q1',
effect: yDelay
};
}
}, 'q1', "retry(" + fn.name + ")");
}
function debounceHelper(delayLength, patternOrChannel, worker) {
for (var _len = arguments.length, args = new Array(_len > 3 ? _len - 3 : 0), _key = 3; _key < _len; _key++) {
args[_key - 3] = arguments[_key];
}
var action, raceOutput;
var yTake = {
done: false,
value: __chunk_2.take(patternOrChannel)
};
var yRace = {
done: false,
value: __chunk_2.race({
action: __chunk_2.take(patternOrChannel),
debounce: __chunk_2.delay(delayLength)
})
};
var yFork = function yFork(ac) {
return {
done: false,
value: __chunk_2.fork.apply(void 0, [worker].concat(args, [ac]))
};
};
var yNoop = function yNoop(value) {
return {
done: false,
value: value
};
};
var setAction = function setAction(ac) {
return action = ac;
};
var setRaceOutput = function setRaceOutput(ro) {
return raceOutput = ro;
};
return fsmIterator({
q1: function q1() {
return {
nextState: 'q2',
effect: yTake,
stateUpdater: setAction
};
},
q2: function q2() {
return {
nextState: 'q3',
effect: yRace,
stateUpdater: setRaceOutput
};
},
q3: function q3() {
return raceOutput.debounce ? {
nextState: 'q1',
effect: yFork(action)
} : {
nextState: 'q2',
effect: yNoop(raceOutput.action),
stateUpdater: setAction
};
}
}, 'q1', "debounce(" + safeName(patternOrChannel) + ", " + worker.name + ")");
}
var validateTakeEffect = function validateTakeEffect(fn, patternOrChannel, worker) {
__chunk_1.check(patternOrChannel, is.notUndef, fn.name + " requires a pattern or channel");
__chunk_1.check(worker, is.notUndef, fn.name + " requires a saga parameter");
};
function takeEvery$1(patternOrChannel, worker) {
{
validateTakeEffect(takeEvery$1, patternOrChannel, worker);
}
for (var _len = arguments.length, args = new Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
args[_key - 2] = arguments[_key];
}
return __chunk_2.fork.apply(void 0, [takeEvery, patternOrChannel, worker].concat(args));
}
function takeLatest$1(patternOrChannel, worker) {
{
validateTakeEffect(takeLatest$1, patternOrChannel, worker);
}
for (var _len2 = arguments.length, args = new Array(_len2 > 2 ? _len2 - 2 : 0), _key2 = 2; _key2 < _len2; _key2++) {
args[_key2 - 2] = arguments[_key2];
}
return __chunk_2.fork.apply(void 0, [takeLatest, patternOrChannel, worker].concat(args));
}
function takeLeading$1(patternOrChannel, worker) {
{
validateTakeEffect(takeLeading$1, patternOrChannel, worker);
}
for (var _len3 = arguments.length, args = new Array(_len3 > 2 ? _len3 - 2 : 0), _key3 = 2; _key3 < _len3; _key3++) {
args[_key3 - 2] = arguments[_key3];
}
return __chunk_2.fork.apply(void 0, [takeLeading, patternOrChannel, worker].concat(args));
}
function throttle$1(ms, pattern, worker) {
{
__chunk_1.check(pattern, is.notUndef, 'throttle requires a pattern');
__chunk_1.check(worker, is.notUndef, 'throttle requires a saga parameter');
}
for (var _len4 = arguments.length, args = new Array(_len4 > 3 ? _len4 - 3 : 0), _key4 = 3; _key4 < _len4; _key4++) {
args[_key4 - 3] = arguments[_key4];
}
return __chunk_2.fork.apply(void 0, [throttle, ms, pattern, worker].concat(args));
}
function retry$1(maxTries, delayLength, worker) {
for (var _len5 = arguments.length, args = new Array(_len5 > 3 ? _len5 - 3 : 0), _key5 = 3; _key5 < _len5; _key5++) {
args[_key5 - 3] = arguments[_key5];
}
return __chunk_2.call.apply(void 0, [retry, maxTries, delayLength, worker].concat(args));
}
function debounce(delayLength, pattern, worker) {
for (var _len6 = arguments.length, args = new Array(_len6 > 3 ? _len6 - 3 : 0), _key6 = 3; _key6 < _len6; _key6++) {
args[_key6 - 3] = arguments[_key6];
}
return __chunk_2.fork.apply(void 0, [debounceHelper, delayLength, pattern, worker].concat(args));
}
exports.effectTypes = __chunk_2.effectTypes;
exports.take = __chunk_2.take;
exports.takeMaybe = __chunk_2.takeMaybe;
exports.put = __chunk_2.put;
exports.putResolve = __chunk_2.putResolve;
exports.all = __chunk_2.all;
exports.race = __chunk_2.race;
exports.call = __chunk_2.call;
exports.apply = __chunk_2.apply;
exports.cps = __chunk_2.cps;
exports.fork = __chunk_2.fork;
exports.spawn = __chunk_2.spawn;
exports.join = __chunk_2.join;
exports.cancel = __chunk_2.cancel;
exports.select = __chunk_2.select;
exports.actionChannel = __chunk_2.actionChannel;
exports.cancelled = __chunk_2.cancelled;
exports.flush = __chunk_2.flush;
exports.getContext = __chunk_2.getContext;
exports.setContext = __chunk_2.setContext;
exports.delay = __chunk_2.delay;
exports.debounce = debounce;
exports.retry = retry$1;
exports.takeEvery = takeEvery$1;
exports.takeLatest = takeLatest$1;
exports.takeLeading = takeLeading$1;
exports.throttle = throttle$1;<|fim▁end|> | |
<|file_name|>value.rs<|end_file_name|><|fim▁begin|>use std::fmt::{Formatter,Display,Error};
#[derive(Clone,Debug,PartialEq,Eq,Hash,RustcEncodable,RustcDecodable)]
pub enum Endianess {
Little,
Big,
}
#[derive(Clone,Debug,PartialEq,Eq,Hash,RustcEncodable,RustcDecodable)]
pub enum Rvalue {
Constant(u64),
Undefined,
Variable{ width: u16, name: String, subscript: Option<u32> },
Memory{ offset: Box<Rvalue>, bytes: u16, endianess: Endianess, name: String },
}
#[derive(Clone,Debug,PartialEq,Eq,Hash,RustcEncodable,RustcDecodable)]
pub enum Lvalue {
Undefined,
Variable{ width: u16, name: String, subscript: Option<u32> },
Memory{ offset: Box<Rvalue>, bytes: u16, endianess: Endianess, name: String },
}
impl Rvalue {
pub fn from_lvalue(rv: &Lvalue) -> Rvalue {
match rv {
&Lvalue::Undefined => Rvalue::Undefined,
&Lvalue::Variable{ width: ref w, name: ref n, subscript: ref s} =>
Rvalue::Variable{ width: w.clone(), name: n.clone(), subscript: s.clone()},
&Lvalue::Memory{ offset: ref o, bytes: ref b, endianess: ref e, name: ref n} =>
Rvalue::Memory{ offset: o.clone(), bytes: b.clone(), endianess: e.clone(), name: n.clone()},
}
}
}
impl Display for Rvalue {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
match self {
&Rvalue::Constant(c) => f.write_fmt(format_args!("{:x}",c)),
&Rvalue::Undefined => f.write_str("undef"),
&Rvalue::Variable{ name: ref n,.. } => f.write_str(n),
&Rvalue::Memory{ offset: ref o, name: ref n,..} => f.write_fmt(format_args!("{}[{}]",n,o)),
}
}
}
impl Lvalue {
pub fn from_rvalue(rv: &Rvalue) -> Option<Lvalue> {
match rv {
&Rvalue::Undefined => Some(Lvalue::Undefined),
&Rvalue::Variable{ width: ref w, name: ref n, subscript: ref s} =>
Some(Lvalue::Variable{ width: w.clone(), name: n.clone(), subscript: s.clone()}),
&Rvalue::Memory{ offset: ref o, bytes: ref b, endianess: ref e, name: ref n} =>
Some(Lvalue::Memory{ offset: o.clone(), bytes: b.clone(), endianess: e.clone(), name: n.clone()}),<|fim▁hole|> _ => None,
}
}
pub fn to_rv(&self) -> Rvalue {
Rvalue::from_lvalue(self)
}
}
pub trait ToRvalue {
fn to_rv(&self) -> Rvalue;
}
impl ToRvalue for Rvalue {
fn to_rv(&self) -> Rvalue {
self.clone()
}
}
impl ToRvalue for Lvalue {
fn to_rv(&self) -> Rvalue {
Rvalue::from_lvalue(self)
}
}
impl ToRvalue for u64 {
fn to_rv(&self) -> Rvalue {
Rvalue::Constant(self.clone())
}
}
#[cfg(test)]
mod tests {
use super::*;
use msgpack;
#[test]
fn construct() {
let u = Rvalue::Undefined;
let c = Rvalue::Constant(5);
let v = Rvalue::Variable{ name: "n".to_string(), width: 32, subscript: None };
let m = Rvalue::Memory{ offset: Box::new(Rvalue::Undefined), bytes: 1, endianess: Endianess::Little, name: "ram".to_string() };
let u2 = u.clone();
let c2 = c.clone();
let v2 = v.clone();
let m2 = m.clone();
println!("{:?} {:?} {:?} {:?}",u,c,v,m);
assert_eq!(u,u2);
assert_eq!(c,c2);
assert_eq!(v,v2);
assert_eq!(m,m2);
}
#[test]
fn convert_lvalue_rvalue() {
let ru = Rvalue::Undefined;
let rc = Rvalue::Constant(5);
let rv = Rvalue::Variable{ name: "n".to_string(), width: 32, subscript: None };
let rm = Rvalue::Memory{ offset: Box::new(Rvalue::Undefined), bytes: 1, endianess: Endianess::Little, name: "ram".to_string() };
let lu = Lvalue::Undefined;
let lv = Lvalue::Variable{ name: "n".to_string(), width: 32, subscript: None };
let lm = Lvalue::Memory{ offset: Box::new(Rvalue::Undefined), bytes: 1, endianess: Endianess::Little, name: "ram".to_string() };
assert_eq!(Some(lu.clone()), Lvalue::from_rvalue(&ru));
assert_eq!(Some(lv.clone()), Lvalue::from_rvalue(&rv));
assert_eq!(Some(lm.clone()), Lvalue::from_rvalue(&rm));
assert_eq!(None, Lvalue::from_rvalue(&rc));
assert_eq!(ru, Rvalue::from_lvalue(&lu));
assert_eq!(rv, Rvalue::from_lvalue(&lv));
assert_eq!(rm, Rvalue::from_lvalue(&lm));
}
#[test]
fn marshal() {
let a = Rvalue::Undefined;
let b = Rvalue::Constant(42);
let c = Rvalue::Variable{ name: "test".to_string(), width: 8, subscript: Some(8) };
let d = Rvalue::Memory{ offset: Box::new(Rvalue::Constant(5)), bytes: 2, endianess: Endianess::Little, name: "bank1".to_string()};
let a2 = msgpack::Encoder::to_msgpack(&a).ok().unwrap();
let b2 = msgpack::Encoder::to_msgpack(&b).ok().unwrap();
let c2 = msgpack::Encoder::to_msgpack(&c).ok().unwrap();
let d2 = msgpack::Encoder::to_msgpack(&d).ok().unwrap();
let a3 = msgpack::from_msgpack(&a2).ok().unwrap();
let b3 = msgpack::from_msgpack(&b2).ok().unwrap();
let c3 = msgpack::from_msgpack(&c2).ok().unwrap();
let d3 = msgpack::from_msgpack(&d2).ok().unwrap();
assert_eq!(a, a3);
assert_eq!(b, b3);
assert_eq!(c, c3);
assert_eq!(d, d3);
}
}<|fim▁end|> | |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>/*
* Minio Client (C) 2014, 2015 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package main
import (
"bytes"
"fmt"
"os"
"path/filepath"
"runtime"
"sort"
"strconv"
"github.com/minio/cli"
"github.com/minio/mc/pkg/console"
"github.com/minio/minio/pkg/probe"
"github.com/minio/pb"
"github.com/pkg/profile"
)
var (
// global flags for mc.
mcFlags = []cli.Flag{
cli.BoolFlag{
Name: "help, h",
Usage: "Show help.",
},
}
)
// Help template for mc
var mcHelpTemplate = `NAME:
{{.Name}} - {{.Usage}}
USAGE:
{{.Name}} {{if .Flags}}[FLAGS] {{end}}COMMAND{{if .Flags}} [COMMAND FLAGS | -h]{{end}} [ARGUMENTS...]
COMMANDS:
{{range .Commands}}{{join .Names ", "}}{{ "\t" }}{{.Usage}}
{{end}}{{if .Flags}}
GLOBAL FLAGS:
{{range .Flags}}{{.}}
{{end}}{{end}}
VERSION:
` + mcVersion +
`{{ "\n"}}{{range $key, $value := ExtraInfo}}
{{$key}}:
{{$value}}
{{end}}`
// Function invoked when invalid command is passed.
func commandNotFound(ctx *cli.Context, command string) {
msg := fmt.Sprintf("‘%s’ is not a mc command. See ‘mc --help’.", command)
closestCommands := findClosestCommands(command)
if len(closestCommands) > 0 {
msg += fmt.Sprintf("\n\nDid you mean one of these?\n")
if len(closestCommands) == 1 {
cmd := closestCommands[0]
msg += fmt.Sprintf(" ‘%s’", cmd)
} else {
for _, cmd := range closestCommands {
msg += fmt.Sprintf(" ‘%s’\n", cmd)
}
}
}
fatalIf(errDummy().Trace(), msg)
}
// Check for sane config environment early on and gracefully report.
func checkConfig() {
// Refresh the config once.
loadMcConfig = loadMcConfigFactory()
// Ensures config file is sane.
config, err := loadMcConfig()
// Verify if the path is accessible before validating the config
fatalIf(err.Trace(mustGetMcConfigPath()), "Unable to access configuration file.")
// Validate and print error messages
ok, errMsgs := validateConfigFile(config)
if !ok {
var errorMsg bytes.Buffer
for index, errMsg := range errMsgs {
// Print at most 10 errors
if index > 10 {
break
}
errorMsg.WriteString(errMsg + "\n")
}
console.Fatalln(errorMsg.String())
}
}
func migrate() {
// Fix broken config files if any.
fixConfig()
// Migrate config files if any.
migrateConfig()
// Migrate session files if any.
migrateSession()
// Migrate shared urls if any.
migrateShare()
}
// Get os/arch/platform specific information.
// Returns a map of current os/arch/platform/memstats.
func getSystemData() map[string]string {
host, e := os.Hostname()
fatalIf(probe.NewError(e), "Unable to determine the hostname.")
memstats := &runtime.MemStats{}
runtime.ReadMemStats(memstats)
mem := fmt.Sprintf("Used: %s | Allocated: %s | UsedHeap: %s | AllocatedHeap: %s",
pb.Format(int64(memstats.Alloc)).To(pb.U_BYTES),
pb.Format(int64(memstats.TotalAlloc)).To(pb.U_BYTES),
pb.Format(int64(memstats.HeapAlloc)).To(pb.U_BYTES),
pb.Format(int64(memstats.HeapSys)).To(pb.U_BYTES))
platform := fmt.Sprintf("Host: %s | OS: %s | Arch: %s", host, runtime.GOOS, runtime.GOARCH)
goruntime := fmt.Sprintf("Version: %s | CPUs: %s", runtime.Version(), strconv.Itoa(runtime.NumCPU()))
return map[string]string{
"PLATFORM": platform,
"RUNTIME": goruntime,
"MEM": mem,
}
}
// initMC - initialize 'mc'.
func initMC() {
// Check if mc config exists.
if !isMcConfigExists() {
err := saveMcConfig(newMcConfig())
fatalIf(err.Trace(), "Unable to save new mc config.")
console.Infoln("Configuration written to ‘" + mustGetMcConfigPath() + "’. Please update your access credentials.")
}
// Check if mc session folder exists.
if !isSessionDirExists() {
fatalIf(createSessionDir().Trace(), "Unable to create session config folder.")
}
// Check if mc share folder exists.
if !isShareDirExists() {
initShareConfig()
}
}
func registerBefore(ctx *cli.Context) error {
// Check if mc was compiled using a supported version of Golang.
checkGoVersion()
// Set the config folder.
setMcConfigDir(ctx.GlobalString("config-folder"))
// Migrate any old version of config / state files to newer format.
migrate()
// Initialize default config files.
initMC()
// Set global flags.
setGlobalsFromContext(ctx)
// Check if config can be read.
checkConfig()
return nil
}<|fim▁hole|>func findClosestCommands(command string) []string {
var closestCommands []string
for _, value := range commandsTree.PrefixMatch(command) {
closestCommands = append(closestCommands, value.(string))
}
sort.Strings(closestCommands)
// Suggest other close commands - allow missed, wrongly added and even transposed characters
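// For instance, a transposed "sl" or a doubled "lls" is within a
// Damerau-Levenshtein distance of 1 of the registered "ls" command, so
// either would be suggested here (illustrative example).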
for _, value := range commandsTree.walk(commandsTree.root) {
if sort.SearchStrings(closestCommands, value.(string)) < len(closestCommands) {
continue
}
// 2 is arbitrary and represents the max allowed number of typed errors
if DamerauLevenshteinDistance(command, value.(string)) < 2 {
closestCommands = append(closestCommands, value.(string))
}
}
return closestCommands
}
func registerApp() *cli.App {
// Register all the commands (refer flags.go)
registerCmd(lsCmd) // List contents of a bucket.
registerCmd(mbCmd) // Make a bucket.
registerCmd(catCmd) // Display contents of a file.
registerCmd(pipeCmd) // Write contents of stdin to a file.
registerCmd(shareCmd) // Share documents via URL.
registerCmd(cpCmd) // Copy objects and files from multiple sources to single destination.
registerCmd(mirrorCmd) // Mirror objects and files from single source to multiple destinations.
registerCmd(diffCmd) // Compute differences between two files or folders.
registerCmd(rmCmd) // Remove a file or bucket
registerCmd(accessCmd) // Set access permissions.
registerCmd(sessionCmd) // Manage sessions for copy and mirror.
registerCmd(configCmd) // Configure minio client.
registerCmd(updateCmd) // Check for new software updates.
registerCmd(versionCmd) // Print version.
app := cli.NewApp()
app.Usage = "Minio Client for cloud storage and filesystems."
app.Commands = commands
app.Author = "Minio.io"
app.Flags = append(mcFlags, globalFlags...)
app.CustomAppHelpTemplate = mcHelpTemplate
app.CommandNotFound = commandNotFound // handler function declared above.
return app
}
// mustGetProfileDir must get the location that the profile will be written to.
func mustGetProfileDir() string {
return filepath.Join(mustGetMcConfigDir(), globalProfileDir)
}
func main() {
// Enable profiling supported modes are [cpu, mem, block].
// ``MC_PROFILER`` supported options are [cpu, mem, block].
switch os.Getenv("MC_PROFILER") {
case "cpu":
defer profile.Start(profile.CPUProfile, profile.ProfilePath(mustGetProfileDir())).Stop()
case "mem":
defer profile.Start(profile.MemProfile, profile.ProfilePath(mustGetProfileDir())).Stop()
case "block":
defer profile.Start(profile.BlockProfile, profile.ProfilePath(mustGetProfileDir())).Stop()
}
probe.Init() // Set project's root source path.
probe.SetAppInfo("Release-Tag", mcReleaseTag)
probe.SetAppInfo("Commit", mcShortCommitID)
app := registerApp()
app.Before = registerBefore
app.ExtraInfo = func() map[string]string {
if _, e := pb.GetTerminalWidth(); e != nil {
globalQuiet = true
}
if globalDebug {
return getSystemData()
}
return make(map[string]string)
}
app.RunAndExitOnError()
}<|fim▁end|> |
// findClosestCommands to match a given string with commands trie tree. |
<|file_name|>places.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, AfterViewInit } from '@angular/core';
import { Store } from '@ngrx/store';
import { TdMediaService } from '@covalent/core';
import { go } from '@ngrx/router-store';
import * as fromRoot from '../state-management/reducers';
import { OpenSidenavAction } from '../state-management/actions/layout-action';
@Component({
selector: 'app-places',
templateUrl: './places.component.html'
})
export class PlacesComponent implements OnInit, AfterViewInit {
center$;
places$;
pending$;
address$;
selectedPlace$;
layoutOpen$;
constructor(public media: TdMediaService,
private store: Store<fromRoot.State>) {}
ngOnInit() {
this.pending$ = this.store.select(fromRoot.pending);
this.address$ = this.store.select(fromRoot.address);
this.selectedPlace$ = this.store.select(fromRoot.selected);
this.center$ = this.store.select(fromRoot.center);
this.places$ = this.store.select(fromRoot.places);
this.layoutOpen$ = this.store.select(fromRoot.getShowSidenav);
}<|fim▁hole|>
onChangeSelectedPlace(placeKey) {
this.store.dispatch(new OpenSidenavAction());
this.store.dispatch(go(`/detail/${placeKey}`));
}
}<|fim▁end|> |
ngAfterViewInit() {
this.media.broadcast();
} |
<|file_name|>send_photo_link_body.go<|end_file_name|><|fim▁begin|>// Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
<|fim▁hole|> "github.com/go-openapi/errors"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
)
// SendPhotoLinkBody send photo link body
// swagger:model SendPhotoLinkBody
type SendPhotoLinkBody struct {
// caption
Caption string `json:"caption,omitempty"`
// chat id
// Required: true
ChatID interface{} `json:"chat_id"`
// disable notification
DisableNotification bool `json:"disable_notification,omitempty"`
// photo
// Required: true
Photo *string `json:"photo"`
// reply markup
ReplyMarkup interface{} `json:"reply_markup,omitempty"`
// reply to message id
ReplyToMessageID int64 `json:"reply_to_message_id,omitempty"`
}
// Validate validates this send photo link body
func (m *SendPhotoLinkBody) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validateChatID(formats); err != nil {
// prop
res = append(res, err)
}
if err := m.validatePhoto(formats); err != nil {
// prop
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *SendPhotoLinkBody) validateChatID(formats strfmt.Registry) error {
return nil
}
func (m *SendPhotoLinkBody) validatePhoto(formats strfmt.Registry) error {
if err := validate.Required("photo", "body", m.Photo); err != nil {
return err
}
return nil
}
// MarshalBinary interface implementation
func (m *SendPhotoLinkBody) MarshalBinary() ([]byte, error) {
if m == nil {
return nil, nil
}
return swag.WriteJSON(m)
}
// UnmarshalBinary interface implementation
func (m *SendPhotoLinkBody) UnmarshalBinary(b []byte) error {
var res SendPhotoLinkBody
if err := swag.ReadJSON(b, &res); err != nil {
return err
}
*m = res
return nil
}<|fim▁end|> | import (
strfmt "github.com/go-openapi/strfmt"
|
<|file_name|>ZaOverviewPanelController.js<|end_file_name|><|fim▁begin|>define(["require", "exports"], function (require, exports) {<|fim▁hole|><|fim▁end|> | "use strict";
exports.ZaOverviewPanelController = ZaOverviewPanelController;
}); |
<|file_name|>aggregate-accessibility.ts<|end_file_name|><|fim▁begin|>//
// disable a few ESLint rules that choke on pretty-printed arrays below
/* eslint indent: 0, no-multi-spaces: 0 */
/** Test that the aggregate accessibility selector works correctly */
import {expect} from '@jest/globals'
import range from 'lodash/range'
import getAggregateAccessibility from '../aggregate-accessibility'
const contains = (width, height) => (x, y) =>
x >= 0 && y >= 0 && x < width && y < height
describe('utils > aggregation accessibility', () => {
it('should work', () => {
// if we think of these as population, the two right cells of the top row have population of 100 and 25
const population = {
contains: contains(3, 2),
data: [
10000,
100,
25, // aggregation area overlaps rightmost two cells
10000,
10000,
10000
],
height: 2,
min: 0,
north: 50,
west: 49,
width: 3,
zoom: 9
}
// The aggregation area starts in the second cell of the weights and doesn't cover the lower row
// it covers 50% of the top center (100 people) and 100% of the top right (25 people)
const aggregationArea = {
grid: {
contains: contains(2, 1),
data: [50000, 100000],
height: 1,
min: 0,
north: 50,
west: 50,
width: 2,
zoom: 9
}
}
// Accessibility starts two cells north and two cells west of aggregation area
const accessibility = {
contains: contains(4, 4),
data: [
100,
100,
100,
100,
100,
100,
100,
100,
100,
100,
1000,
5500, // rightmost two overlap aggregation area
100,
100,
100,
100
],
height: 4,
min: 0,
north: 48,
west: 48,
width: 4,
zoom: 9
}
// okay all together now. The aggregation area covers 50% of one cell that has population 100 and
// accessibility 1000, yielding a spike at 1000 of height 50 people, and 100% of another cell
// with population 25 and accessibility 5500, yielding 25 people with an accessibility of 5500
// There are fifteen bins scaled between min and max; the first bin should therefore run from 1000 to
    // 1300 and have value 50, and the final bin should run from 5200 to 5500 and have value 25.
// all percentiles up to the 66th should have value 1000, all above value 5500.
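    // Illustrative check (added for clarity, not part of the original test):
    // the bin width is (5500 - 1000) / 15 = 300, so bin 0 spans [1000, 1300)
    // and bin 14 spans [5200, 5500]; the population-weighted average
    // accessibility is (50 * 1000 + 25 * 5500) / 75 = 2500.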
const aggregateAccessibility = getAggregateAccessibility(
accessibility,
aggregationArea,
population
)
// throw an error to make flow happy if aggregateAccessibility is undefined
if (!aggregateAccessibility) {
throw new Error('expected aggregate accessibility to be defined')
}
expect(aggregateAccessibility.bins).toHaveLength(15)
expect(aggregateAccessibility.bins[0].min).toEqual(1000)
expect(aggregateAccessibility.bins[0].max).toEqual(1300)
expect(aggregateAccessibility.bins[0].value).toEqual(50)
expect(aggregateAccessibility.bins[14].min).toEqual(5200)
expect(aggregateAccessibility.bins[14].max).toEqual(5500)
expect(aggregateAccessibility.bins[14].value).toEqual(25)
const expectedPercentiles = [
0, // 0th percentile is 0 by definition
...range(66).map(() => 1000), // lower 2/3 of population are in lower accessibility area
      ...range(33).map(() => 5500) // upper 1/3 is in higher accessibility area
]
expect(aggregateAccessibility.percentiles).toEqual(expectedPercentiles)<|fim▁hole|> })
})<|fim▁end|> | expect(aggregateAccessibility.weightedAverage).toEqual(2500) |
<|file_name|>chat.js<|end_file_name|><|fim▁begin|>var API_PHP ="http://devtucompass.tk/pici/BackEnd/Clases/chatAdmin.php";
var API_REST = "http://devtucompass.tk/pici/API/";
$.support.cors = true;
$.mobile.allowCrossDomainPages = true;
function enviarInfo(){
$("#enviarInfo").click(function(){
$.ajax({
type: "POST",
url: API_PHP,
data: $("#info").serialize(), // serializes the form's elements.
error: function(jqXHR, textStatus, errorThrown){
console.log("hi");
console.log(jqXHR);<|fim▁hole|> console.log(errorThrown);
//do stuff
},
cache:false
}).done( function (data){
localStorage.cedula= $("#cedula").val();
$("#cedulaH").attr('value',localStorage.cedula);
localStorage.useradmin = -1;
$.mobile.navigate( "#chat");
sendMessage();
});
});
}
function sendMessage(){
$("#send").click(function(){
$.ajax({
type: "POST",
url: API_PHP,
data: $("#messagechat").serialize(), // serializes the form's elements.
error: function(jqXHR, textStatus, errorThrown){
alert("hi");
alert(jqXHR);
alert(textStatus);
alert(errorThrown);
//do stuff
},
cache:false
}).done( function (data){
console.log("la informacion es: "+data);
});
});
$("#message").val("");
}
function getLogMessages() {
var cedula= localStorage.cedula ;
//alert("la Cedula es: "+cedula);
var content = "";
$.ajax({
type: "GET",
url: API_REST+"lastLogChat?cedula="+cedula,
error: function(jqXHR, textStatus, errorThrown){
console.log("hi");
console.log(jqXHR);
console.log(textStatus);
console.log(errorThrown);
//do stuff
},
cache:false,
format:"jsonp",
crossDomain: true,
async: true
}).done( function (data){
$.each( data, function ( i, item ){
//alert(item.id);
if(item.por ==='si'){
content+='<li data-theme="c">'+
'<a href="#">'+
'<h2>Consulta Sena PICI Dice: </h2>'+
'<p>'+item.mensaje+'</p>'+
'<p class="ui-li-aside"></p>'+
'</a>'+
'</li>';
}else{
content+='<li data-theme="e">'+
'<a href="#">'+
'<h2>Tu Dices: </h2>'+
'<p>'+item.mensaje+'</p>'+
'<p class="ui-li-aside"></p>'+
'</a>'+
'</li>';
}
});
$("#logMensajes").html(content);
$( "#logMensajes" ).listview( "refresh" );
});
}<|fim▁end|> | console.log(textStatus); |
<|file_name|>commons.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# MouseTrap
#
# Copyright 2009 Flavio Percoco Premoli<|fim▁hole|>#
# MouseTrap is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License v2 as published
# by the Free Software Foundation.
#
# mouseTrap is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with mouseTrap. If not, see <http://www.gnu.org/licenses/>.
""" Common MouseTrap Functions. """
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2008 Flavio Percoco Premoli."
__license__ = "GPLv2"
import os
import re
def get_py_list(dirlist):
"""
    Checks for .py files in the directories of dirlist
    and returns their module names without the .py extension.
    Arguments:
    - dirlist: A directory or a list of directories to scan.
"""
    if not isinstance(dirlist, list):
dirlist = [dirlist]
reg = re.compile(r'([A-Za-z0-9]+)\.py$', re.DOTALL)
group = []
for dir in dirlist:
if not os.path.isdir(dir):
continue
group.append([ mod[0] for mod in [ reg.findall(f) for f in os.listdir("%s/" % dir) if "handler" not in f] if mod ])
return [] + [x for l in group for x in l]<|fim▁end|> | #
# This file is part of mouseTrap. |
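# Illustrative usage sketch (not part of the original module); the directory
# and file names below are hypothetical:
#
#   >>> get_py_list(["scripts", "addons"])
#   ['foo', 'bar']   # e.g. from scripts/foo.py and addons/bar.py
#
# Directories that do not exist are skipped, files whose names contain
# "handler" are ignored, and a single directory may be passed as a plain
# string instead of a list.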
<|file_name|>bitcoin_zh_CN.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="zh_CN" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About CryptocauseCoin</source>
<translation>关于比特币</translation>
</message>
<message>
<location line="+39"/>
<source><b>CryptocauseCoin</b> version</source>
<translation><b>比特币</b>版本</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>版权</translation>
</message>
<message>
<location line="+0"/>
<source>The CryptocauseCoin developers</source>
<translation>CryptocauseCoin-qt 客户端开发团队</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>通讯录</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>双击以编辑地址或标签</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>创建新地址</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>复制当前选中地址到系统剪贴板</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&新建地址</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your CryptocauseCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>这是您用来收款的比特币地址。为了标记不同的资金来源,建议为每个付款人保留不同的收款地址。</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&复制地址</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>显示二维码</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a CryptocauseCoin address</source>
<translation>签名消息,证明这个地址属于您。</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>对消息签名</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>从列表中删除选中的地址</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>导出当前数据到文件</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation>&导出</translation>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified CryptocauseCoin address</source>
<translation>验证消息,确保消息是由指定的比特币地址签名过的。</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&验证消息</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&删除</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your CryptocauseCoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>这是您用来付款的比特币地址。在付款前,请总是核实付款金额和收款地址。</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>复制 &标签</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&编辑</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>付款</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>导出通讯录数据</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>逗号分隔文件 (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>导出错误</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>无法写入文件 %1。</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>标签</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>地址</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(没有标签)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>密码对话框</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>输入密码</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>新密码</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>重复新密码</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>输入钱包的新密码。<br/>使用的密码请至少包含<b>10个以上随机字符</>,或者是<b>8个以上的单词</b>。</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>加密钱包</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>该操作需要您首先使用密码解锁钱包。</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>解锁钱包</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>该操作需要您首先使用密码解密钱包。</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>解密钱包</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>修改密码</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>请输入钱包的旧密码与新密码。</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>确认加密钱包</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR ZETACOINS</b>!</source>
<translation>警告:如果您加密了您的钱包,但是忘记了密码,你将会<b>丢失所有的比特币</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>您确定需要为钱包加密吗?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>重要提示:您以前备份的钱包文件应该替换成最新生成的加密钱包文件(重新备份)。从安全性上考虑,您以前备份的未加密的钱包文件,在您使用新的加密钱包后将无效,请重新备份。</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>警告:大写锁定键处于打开状态!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>钱包已加密</translation>
</message>
<message>
<location line="-56"/>
<source>CryptocauseCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your CryptocauseCoins from being stolen by malware infecting your computer.</source>
<translation>将关闭软件以完成加密过程。 请您谨记:钱包加密并不是万能的,电脑中毒,您的比特币还是有可能丢失。</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>钱包加密失败</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>由于一个本地错误,加密钱包操作已经失败。您的钱包没有被加密。</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>密码不匹配。</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>钱包解锁失败</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>用于解密钱包的密码不正确。</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>钱包解密失败。</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>修改钱包密码成功。</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>对&消息签名...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>正在与网络同步...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&概况</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>显示钱包概况</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&交易记录</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>查看交易历史</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>修改存储的地址和标签列表</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>显示接收支付的地址列表</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>退出</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>退出程序</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about CryptocauseCoin</source>
<translation>显示比特币的相关信息</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>关于 &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>显示Qt相关信息</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&选项...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&加密钱包...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&备份钱包...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&修改密码...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>正在从磁盘导入数据块...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>正在为数据块建立索引...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a CryptocauseCoin address</source>
<translation>向一个比特币地址发送比特币</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for CryptocauseCoin</source>
<translation>设置选项</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>备份钱包到其它文件夹</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>修改钱包加密口令</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>&调试窗口</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>在诊断控制台调试</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&验证消息...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>CryptocauseCoin</source>
<translation>比特币</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>钱包</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>&发送</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>&接收</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>&地址</translation>
</message>
<message>
<location line="+22"/>
<source>&About CryptocauseCoin</source>
<translation>&关于比特币</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&显示 / 隐藏</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>显示或隐藏主窗口</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>对钱包中的私钥加密</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your CryptocauseCoin addresses to prove you own them</source>
<translation>用比特币地址关联的私钥为消息签名,以证明您拥有这个比特币地址</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified CryptocauseCoin addresses</source>
<translation>校验消息,确保该消息是由指定的比特币地址所有者签名的</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&文件</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&设置</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&帮助</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>分页工具栏</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>CryptocauseCoin client</source>
<translation>比特币客户端</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to CryptocauseCoin network</source>
<translation><numerusform>到比特币网络的连接共有%n条</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation>No block source available...</translation>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>%1 / %2 个交易历史的区块已下载</translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>已处理 %1 个交易历史数据块。</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n 小时前</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n 天前</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n 周前</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation>落后 %1 </translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>最新收到的区块产生于 %1。</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>在此之后的交易尚未可见</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>错误</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>警告</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>信息</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>该交易的字节数超标。您可以选择支付%1的交易费给处理您的交易的网络节点,有助于比特币网络的运行。您愿意支付这笔交易费用吗?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>最新状态</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>更新中...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>确认交易费</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>已发送交易</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>流入交易</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>日期: %1
金额: %2
类别: %3
地址: %4
</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>URI 处理</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid CryptocauseCoin address or malformed URI parameters.</source>
<translation>URI无法解析!原因可能是比特币地址不正确,或者URI参数错误。</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>钱包已被<b>加密</b>,当前为<b>解锁</b>状态</translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>钱包已被<b>加密</b>,当前为<b>锁定</b>状态</translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. CryptocauseCoin can no longer continue safely and will quit.</source>
<translation>发生严重错误。</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>网络警报</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>编辑地址</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&标签</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>与此地址条目关联的标签</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&地址</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>该地址与地址簿中的条目已关联,无法作为发送地址编辑。</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>新接收地址</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>新发送地址</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>编辑接收地址</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>编辑发送地址</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>输入的地址 "%1" 已经存在于地址簿。</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid CryptocauseCoin address.</source>
<translation>您输入的 "%1" 不是合法的比特币地址.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>无法解锁钱包</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>密钥创建失败.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>CryptocauseCoin-Qt</source>
<translation>CryptocauseCoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>版本</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>使用:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>命令行选项</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>UI选项</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>设置语言, 例如 "de_DE" (缺省: 系统语言)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>启动时最小化
</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>启动时显示版权页 (缺省: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>选项</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&主要的</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>支付交易 &费用</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start CryptocauseCoin after logging in to the system.</source>
<translation>登录系统后自动开启比特币客户端</translation>
</message>
<message>
<location line="+3"/>
<source>&Start CryptocauseCoin on system login</source>
<translation>启动时&运行</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>恢复客户端的缺省设置</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>恢复缺省设置</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>&网络</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the CryptocauseCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>自动在路由器中打开比特币端口。只有当您的路由器开启 UPnP 选项时此功能才有效。</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>使用 &UPnP 映射端口</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the CryptocauseCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>通过代理服务器连接比特币网络(例如:通过Tor连接)</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>&通过Socks代理连接:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>代理服务器&IP:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>代理服务器IP (如 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&端口:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>代理端口(例如 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>Socks &版本</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Socks代理版本 (例如 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&窗口</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>最小化窗口后仅显示托盘图标</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&最小化到托盘</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>当窗口关闭时程序最小化而不是退出。当使用该选项时,程序只能通过在菜单中选择退出来关闭</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>单击关闭按钮最小化</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&显示</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>用户界面&语言:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting CryptocauseCoin.</source>
<translation>在这里设置用户界面的语言。设置将在客户端重启后生效。</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&比特币金额单位:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>选择比特币单位。</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show CryptocauseCoin addresses in the transaction list or not.</source>
<translation>是否需要在交易清单中显示比特币地址。</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>在交易清单中&显示比特币地址</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&确定</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&取消</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&应用</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>缺省</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>确认恢复缺省设置</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>某些设置选项需要重启客户端才能生效</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>您希望继续吗?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>警告</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting CryptocauseCoin.</source>
<translation>需要重启客户端软件才能生效。</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>提供的代理服务器地址无效。</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>表单</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the CryptocauseCoin network after a connection is established, but this process has not completed yet.</source>
<translation>现在显示的消息可能是过期的. 在连接上比特币网络节点后,您的钱包将自动与网络同步,但是这个过程还没有完成.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>余额:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>未确认:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>钱包</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>未成熟的:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>尚未成熟的挖矿收入余额</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>最近交易记录</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>您的当前余额</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>尚未确认的交易总额, 未计入当前余额</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>数据同步中</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start CryptocauseCoin: click-to-pay handler</source>
<translation>暂时无法启动比特币:点击支付功能</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>二维码对话框</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>请求付款</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>金额:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>标签:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>消息:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&另存为</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>将 URI 转换成二维码失败.</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>输入的金额非法,请检查。</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>URI 太长, 请试着精简标签/消息的内容.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>保存二维码</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>PNG图像文件(*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>客户端名称</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>不可用</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>客户端版本</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&信息</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>使用OpenSSL版本</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>启动时间</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>网络</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>连接数</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>当前为比特币测试网络</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>数据链</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>当前数据块数量</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>预计数据块数量</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>上一数据块时间</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&打开</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>命令行选项</translation>
</message>
<message>
<location line="+7"/>
<source>Show the CryptocauseCoin-Qt help message to get a list with possible CryptocauseCoin command-line options.</source>
<translation>显示CryptocauseCoin命令行选项帮助信息</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>&显示</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&控制台</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>创建时间</translation>
</message>
<message>
<location line="-104"/>
<source>CryptocauseCoin - Debug window</source>
<translation>比特币 - 调试窗口</translation>
</message>
<message>
<location line="+25"/>
<source>CryptocauseCoin Core</source>
<translation>比特币核心</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>调试日志文件</translation>
</message>
<message>
<location line="+7"/>
<source>Open the CryptocauseCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>打开当前目录中的调试日志文件。日志文件大的话可能要等上几秒钟。</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>清空控制台</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the CryptocauseCoin RPC console.</source>
<translation>欢迎来到 RPC 控制台.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>使用上下方向键浏览历史, <b>Ctrl-L</b>清除屏幕.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>使用 <b>help</b> 命令显示帮助信息.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>发送货币</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>一次发送给多个接收者</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>添加收款人</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>移除所有交易项</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>清除 &所有</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>余额:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123.456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>确认并发送货币</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>发送</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> 到 %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>确认发送货币</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>确定您要发送 %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> 和 </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>收款人地址不合法,请检查。</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>支付金额必须大于0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>金额超出您的账上余额。</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>计入 %1 交易费后的金额超出您的账上余额。</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>发现重复的地址, 每次只能对同一地址发送一次.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>错误:创建交易失败!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>错误: 交易被拒绝. 如果您使用的是备份钱包,可能存在两个钱包不同步的情况,另一个钱包中的比特币已经被使用,但本地的这个钱包尚没有记录。</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>表单</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>金额</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>付款&给:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>付款给这个地址 (例如 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>为这个地址输入一个标签,以便将它添加到您的地址簿</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&标签:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>从地址簿选择地址</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>从剪贴板粘贴地址</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>移除此接收者</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a CryptocauseCoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>请输入比特币地址 (例如: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>签名 - 为消息签名/验证签名消息</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&签名消息</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>您可以用你的地址对消息进行签名,以证明您是该地址的所有人。注意不要对模棱两可的消息签名,以免遭受钓鱼式攻击。请确保消息内容准确的表达了您的真实意愿。</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>用于签名消息的地址(例如: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>从地址簿选择地址</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>从剪贴板粘贴地址</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>请输入您要发送的签名消息</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>签名</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>复制当前签名至剪切板</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this CryptocauseCoin address</source>
<translation>签名消息,证明这个地址属于您。</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>消息签名</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>清空所有签名消息栏</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>清除 &所有</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>&验证消息</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>在下面输入签名地址,消息(请确保换行符、空格符、制表符等等一个不漏)和签名以验证消息。请确保签名信息准确,提防中间人攻击。</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>用于签名消息的地址(例如: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified CryptocauseCoin address</source>
<translation>验证消息,确保消息是由指定的比特币地址签名过的。</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation>验证消息签名</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>清空所有验证消息栏</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a CryptocauseCoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>请输入比特币地址 (例如: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>单击“签名消息“产生签名。</translation>
</message>
<message>
<location line="+3"/>
<source>Enter CryptocauseCoin signature</source>
<translation>输入比特币签名</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>输入的地址非法。</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>请检查地址后重试。</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>输入的地址没有关联的公私钥对。</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>钱包解锁动作取消。</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>找不到输入地址关联的私钥。</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>消息签名失败。</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>消息已签名。</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>签名无法解码。</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>请检查签名后重试。</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>签名与消息摘要不匹配。</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>消息验证失败。</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>消息验证成功。</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+25"/>
<source>The CryptocauseCoin developers</source>
<translation>CryptocauseCoin-qt 客户端开发团队</translation>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>至 %1 个数据块时开启</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1 / 离线</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/未确认</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 确认项</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>状态</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>通过 %n 个节点广播</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>日期</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>源</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>生成</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>来自</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>到</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>自己的地址</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>标签</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>收入</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>将在 %n 个数据块后成熟</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>未被接受</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>支出</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>交易费</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>净额</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>消息</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>备注</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>交易ID</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>新挖出的比特币必须等确120个确认才能使用。您生产出的数据块,将被广播到全网并添加到数据块链。如果入链失败,状态将变为“未被接受”,意味着您的数据块竞争失败,挖出的比特币将不能使用。当某个节点先于你几秒生产出新的数据块,这种情况会偶尔发生。</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>调试信息</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>交易</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>输入</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>金额</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>正确</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>错误</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, 未被成功广播</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Open for %n more block</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>未知</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>交易明细</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>当前面板显示了交易的详细信息</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>日期</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>类型</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>地址</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>数量</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Open for %n more block</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>至 %1 个数据块时开启</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>离线 (%1 个确认项)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>未确认 (%1 / %2 条确认信息)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>已确认 (%1 条确认信息)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>挖矿收入余额将在 %n 个数据块后可用</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>此数据块未被其他节点接收,并可能不被接受!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>已生成但未被接受</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>接收于</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>收款来自</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>发送到</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>付款给自己</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>挖矿所得</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>交易状态。 鼠标移到此区域上可显示确认消息项的数目。</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>接收比特币的时间</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>交易类别。</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>交易目的地址。</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>从余额添加或移除的金额。</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>全部</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>今天</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>本周</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>本月</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>上月</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>今年</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>范围...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>接收于</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>发送到</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>到自己</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>挖矿所得</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>其他</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>输入地址或标签进行搜索</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>最小金额</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>复制地址</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>复制标签</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>复制金额</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>复制交易编号</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>编辑标签</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>显示交易详情</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>导出交易数据</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>逗号分隔文件(*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>已确认</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>日期</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>类别</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>标签</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>地址</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>金额</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>导出错误</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>无法写入文件 %1。</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>范围:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>到</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>发送比特币</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>导出当前数据到文件</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>备份钱包</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>钱包文件(*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>备份失败</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>备份钱包到其它文件夹失败.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>备份成功</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>钱包数据成功存储到新位置</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>CryptocauseCoin version</source>
<translation>比特币版本</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>使用:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or CryptocauseCoind</source>
<translation>发送命令到服务器或者 CryptocauseCoind
</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>列出命令
</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>获得某条命令的帮助
</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>选项:
</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: CryptocauseCoin.conf)</source>
<translation>指定配置文件 (默认为 CryptocauseCoin.conf)
</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: CryptocauseCoind.pid)</source>
<translation>指定 pid 文件 (默认为 CryptocauseCoind.pid)
</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>指定数据目录
</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>设置数据库缓冲区大小 (缺省: 25MB)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 8333 or testnet: 18333)</source>
<translation>监听端口连接 <port> (缺省: 8333 or testnet: 18333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>最大连接数 <n> (缺省: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>连接一个节点并获取对端地址, 然后断开连接</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>指定您的公共地址</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Threshold for disconnecting misbehaving peers (缺省: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Number of seconds to keep misbehaving peers from reconnecting (缺省: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>设置RPC监听端口%u时发生错误, IPv4:%s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 8332 or testnet: 18332)</source>
<translation>JSON-RPC连接监听端口<port> (缺省:8332 testnet:18332)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>接受命令行和 JSON-RPC 命令
</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>在后台运行并接受命令
</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>使用测试网络
</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>接受来自外部的连接 (缺省: 如果不带 -proxy or -connect 参数设置为1)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=CryptocauseCoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "CryptocauseCoin Alert" [email protected]
</source>
<translation>%s, 您必须在配置文件设置rpcpassword:
%s
建议您使用下面的随机密码:
rpcuser=CryptocauseCoinrpc
rpcpassword=%s
(您无需记住此密码)
用户名和密码 必! 须! 不一样。
如果配置文件不存在,请自行建立一个只有所有者拥有只读权限的文件。
推荐您开启提示通知以便收到错误通知,
像这样: alertnotify=echo %%s | mail -s "CryptocauseCoin Alert" [email protected]
</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>在IPv6模式下设置RPC监听端口 %u 失败,返回到IPv4模式: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>绑定指定的IP地址开始监听。IPv6地址请使用[host]:port 格式</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. CryptocauseCoin is probably already running.</source>
<translation>无法给数据目录 %s上锁。本软件可能已经在运行。</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>错误:该交易被拒绝!发生这种错误的原因可能是:钱包中的比特币已经被用掉,有可能您复制了wallet.dat钱包文件,然后用复制的钱包文件支付了比特币,但是这个钱包文件中没有记录。</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>错误:因为该交易的数量、复杂度或者动用了刚收到不久的资金,您需要支付不少于%s的交易费用。</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>当收到相关通知时执行命令(命令行中的 %s 的替换为消息)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>当钱包交易发生变化时执行命令 (命令行中的 %s 会被替换成交易ID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>这是测试用的预发布版本 - 请谨慎使用 - 不要用来挖矿,或者在正式商用环境下使用</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>警告:-paytxfee 交易费设置得太高了!每笔交易都将支付交易费。</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>警告:显示的交易可能不正确!您需要升级客户端软件,或者网络上的其他节点需要升级。</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong CryptocauseCoin will not work properly.</source>
<translation>警告:请检查电脑的日期时间设置是否正确!时间错误可能会导致比特币客户端运行异常。</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>警告:钱包文件wallet.dat读取失败!最重要的公钥、私钥数据都没有问题,但是交易记录或地址簿数据不正确,或者存在数据丢失。</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>警告:钱包文件wallet.dat损坏! 原始的钱包文件已经备份到%s目录下并重命名为{timestamp}.bak 。如果您的账户余额或者交易记录不正确,请使用您的钱包备份文件恢复。</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>尝试从损坏的钱包文件wallet.dat中恢复私钥</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>数据块创建选项:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>仅连接到指定节点</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>检测发现数据块数据库损坏。请使用 -reindex参数重启客户端。</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>发现自己的IP地址(缺省:不带 -externalip 参数监听时设置为1)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>你想现在就重建块数据库吗?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>初始化数据块数据库出错</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>Error initializing wallet database environment %s!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>导入数据块数据库出错</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>导入数据块数据库出错</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>错误:磁盘剩余空间低!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>错误:钱包被锁定,无法创建交易!</translation>
</message>
<message>
<location line="+1"/><|fim▁hole|> <source>Error: system error: </source>
<translation>错误:系统出错。</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>监听端口失败。请使用 -listen=0 参数。</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>无法读取数据块信息</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>读取数据块失败</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>无法同步数据块索引</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>无法写入数据块索引</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>无法写入数据块信息</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>无法写数据块</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>无法写入文件信息</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>无法写入coin数据库</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>无法写入交易索引</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>无法写入回滚信息</translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>通过DNS查找节点(缺省:1 除非使用 -connect 选项)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>启动时检测多少个数据块(缺省:288,0=所有)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>How thorough the block verification is (0-4, default: 3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>重新为当前的blk000??.dat文件建立索引</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>设置使用调用服务 RPC 的线程数量(默认:4)</translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>正在验证数据库的完整性...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>正在检测钱包的完整性...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>从blk000??.dat文件导入数据块</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>信息</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>非法的 -tor 地址:'%s' </translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>维护一份完整的交易索引(缺省:0)</translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>每个连接的最大接收缓存,<n>*1000 字节(缺省:5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>每个连接的最大发送缓存,<n>*1000 字节(缺省:1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>仅接受符合客户端检查点设置的数据块文件</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>仅连接至指定网络的节点<net>(IPv4, IPv6 或者 Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>输出额外的调试信息。打开所有 -debug* 开关</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>输出额外的网络调试信息</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>为调试输出信息添加时间戳</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the CryptocauseCoin Wiki for SSL setup instructions)</source>
<translation>SSL选项:(参见CryptocauseCoin Wiki关于SSL设置栏目)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>请选择Socks代理服务器版本 (4 或 5, 缺省: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>跟踪/调试信息输出到控制台,不输出到debug.log文件</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>跟踪/调试信息输出到 调试器debugger</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>设置最大数据块大小(缺省:250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>设置最小数据块大小(缺省:0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>客户端启动时压缩debug.log文件(缺省:no-debug模式时为1)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>设置连接超时时间(缺省:5000毫秒)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>系统错误:</translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>使用UPnp映射监听端口(缺省: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>使用UPnp映射监听端口(缺省: 监听状态设为1)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>使用代理服务器访问隐藏服务(缺省:同 -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>JSON-RPC连接用户名
</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>警告</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>警告:该软件版本已过时,请升级!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>You need to rebuild the databases using -reindex to change -txindex</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>钱包文件wallet.dat损坏,抢救备份失败</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>JSON-RPC连接密码
</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>允许从指定IP接受到的JSON-RPC连接
</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>向IP地址为 <ip> 的节点发送指令 (缺省: 127.0.0.1)
</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>当最佳数据块变化时执行命令 (命令行中的 %s 会被替换成数据块哈希值)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>将钱包升级到最新的格式</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>设置密钥池大小为 <n> (缺省: 100)
</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>重新扫描数据链以查找遗漏的交易
</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>为 JSON-RPC 连接使用 OpenSSL (https)连接</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>服务器证书 (默认为 server.cert)
</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>服务器私钥 (默认为 server.pem)
</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>可接受的加密器 (默认为 TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)
</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>该帮助信息
</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>无法绑定本机端口 %s (返回错误消息 %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>通过 socks 代理连接</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>使用 -addnode, -seednode 和 -connect选项时允许DNS查找</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>正在加载地址...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>wallet.dat钱包文件加载错误:钱包损坏</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of CryptocauseCoin</source>
<translation>wallet.dat钱包文件加载错误:请升级到最新CryptocauseCoin客户端</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart CryptocauseCoin to complete</source>
<translation>钱包文件需要重写:请退出并重新启动CryptocauseCoin客户端</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>wallet.dat钱包文件加载错误</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>非法的代理地址: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>被指定的是未知网络 -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>被指定的是未知socks代理版本: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>无法解析 -bind 端口地址: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>无法解析 -externalip 地址: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>非法金额 -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>金额不对</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>金额不足</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>加载数据块索引...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>添加节点并与其保持连接</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. CryptocauseCoin is probably already running.</source>
<translation>无法在本机绑定 %s 端口 . 比特币客户端软件可能已经在运行.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>每发送1KB交易所需的费用</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>正在加载钱包...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>无法降级钱包格式</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>无法写入缺省地址</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>正在重新扫描...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>加载完成</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>使用 %s 选项</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>错误</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>您必须在配置文件中加入选项 rpcpassword :
%s
如果配置文件不存在,请新建,并将文件权限设置为仅允许文件所有者读取.</translation>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>p051.rs<|end_file_name|><|fim▁begin|>//! [Problem 51](https://projecteuler.net/problem=51) solver.
#![warn(
bad_style,
unused,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
use integer::Integer;
use prime::PrimeSet;
fn compute(num_value: usize) -> u64 {
let radix = 10;
let ps = PrimeSet::new();
for p in &ps {
let ds = p.into_digits(radix as u64);
let hs = p.into_digit_histogram();
for (d_src, &cnt) in hs.iter().enumerate() {
// Skip digits that appear less than twice.
if cnt <= 1 {
continue;
}
<|fim▁hole|> let mut num_prime = 1;
for d_dst in (d_src + 1)..radix {
if radix - d_dst < num_value - num_prime {
break;
}
let it = ds
.clone()
.map(|d| if d == (d_src as u64) { d_dst as u64 } else { d });
if ps.contains(Integer::from_digits(it, radix as u64)) {
num_prime += 1;
}
}
if num_prime >= num_value {
return p;
}
}
}
unreachable!()
}
fn solve() -> String {
compute(8).to_string()
}
common::problem!("121313", solve);
#[cfg(test)]
mod tests {
#[test]
fn seven() {
assert_eq!(56003, super::compute(7))
}
}<|fim▁end|> | |
<|file_name|>ReplicationNodeOptions.java<|end_file_name|><|fim▁begin|>/*
* Copyright © 2009 HotPads ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datarouter.virtualnode.replication;
import java.util.Optional;
import io.datarouter.storage.node.tableconfig.NodewatchConfigurationBuilder;
public class ReplicationNodeOptions{
public final Optional<String> tableName;
public final Optional<Integer> everyNToPrimary;
public final Optional<Boolean> disableForcePrimary;
public final Optional<Boolean> disableIntroducer;
public final Optional<NodewatchConfigurationBuilder> nodewatchConfigurationBuilder;
private ReplicationNodeOptions(
Optional<String> tableName,
Optional<Integer> everyNToPrimary,
Optional<Boolean> disableForcePrimary,
Optional<Boolean> disableIntroducer,
Optional<NodewatchConfigurationBuilder> nodewatchConfigurationBuilder){
this.tableName = tableName;
this.everyNToPrimary = everyNToPrimary;
this.disableForcePrimary = disableForcePrimary;
this.disableIntroducer = disableIntroducer;
this.nodewatchConfigurationBuilder = nodewatchConfigurationBuilder;
}
public static class ReplicationNodeOptionsBuilder{
public Optional<String> tableName = Optional.empty();
public Optional<Integer> everyNToPrimary = Optional.empty();
public Optional<Boolean> disableForcePrimary = Optional.empty();
public Optional<Boolean> disableIntroducer = Optional.empty();
public Optional<NodewatchConfigurationBuilder> nodewatchConfigurationBuilder = Optional.empty();
public ReplicationNodeOptionsBuilder withTableName(String tableName){
this.tableName = Optional.of(tableName);
return this;
}
public ReplicationNodeOptionsBuilder withEveryNToPrimary(Integer everyNToPrimary){
this.everyNToPrimary = Optional.of(everyNToPrimary);
return this;
}
public ReplicationNodeOptionsBuilder withDisableForcePrimary(boolean disableForcePrimary){
this.disableForcePrimary = Optional.of(disableForcePrimary);
return this;
}
<|fim▁hole|> public ReplicationNodeOptionsBuilder withDisableIntroducer(boolean disableIntroducer){
this.disableIntroducer = Optional.of(disableIntroducer);
return this;
}
public ReplicationNodeOptionsBuilder withNodewatchConfigurationBuilder(
NodewatchConfigurationBuilder nodewatchConfigurationBuilder){
this.nodewatchConfigurationBuilder = Optional.of(nodewatchConfigurationBuilder);
return this;
}
public ReplicationNodeOptions build(){
return new ReplicationNodeOptions(
tableName,
everyNToPrimary,
disableForcePrimary,
disableIntroducer,
nodewatchConfigurationBuilder);
}
}
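	// Editorial sketch, not part of the original class: typical builder usage is assumed
	// to look roughly like
	//   ReplicationNodeOptions options = new ReplicationNodeOptions.ReplicationNodeOptionsBuilder()
	//       .withTableName("MyTable")
	//       .withEveryNToPrimary(10)
	//       .build();
	// Options left unset stay Optional.empty(), so callers can fall back to their own defaults.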
}<|fim▁end|> | |
<|file_name|>test_loaders.py<|end_file_name|><|fim▁begin|># AMDG
import unittest
from datetime import datetime
from balance import BasicLoader, RepayLoader
from base_test import BaseTest
class LoaderTests(BaseTest, unittest.TestCase):
def test_basic_loader(self):
loader = BasicLoader('tests/data/basic_loader')
entries, errors = loader.load(return_errors=True)
self.assertEquals(1, len(entries))
entry = entries[0]
self.assertEquals(-5.00, entry.amount)
self.assertEquals(2, len(errors))
self.assertEquals(errors[0]['entry'], '\n')
self.assertTrue(errors[0]['error'].message.startswith('Not a valid entry'))
self.assertEquals(errors[1]['entry'], 'this is a bad line:\n')
self.assertTrue(errors[1]['error'].message.startswith('Not a valid entry'))
def test_repay_loader(self):
loader = RepayLoader('tests/data/repay_loader')
entries, errors = loader.load(return_errors=True)
self.assertEquals(4, len(entries))
entry = entries.pop()
self.assertEquals(-11.00, entry.amount)
self.assertEquals('repay', entry.category)
self.assertEquals('#2', entry.description)
self.assertEquals('Joe', entry.vendor)
self.assertEquals('cash', entry.method)
self.assertEquals(datetime(2014,10,3), entry.date)
for e in entries:
self.assertTrue(e.method in RepayLoader.methods)
self.assertEquals(2, len(errors))
self.assertEquals(errors[0]['entry'], '#hello\n')
self.assertTrue(errors[0]['error'].message.startswith('Not a valid entry'))<|fim▁hole|> self.assertEquals(errors[1]['entry'], 'bad line\n')
self.assertTrue(errors[1]['error'].message.startswith('Not a valid entry'))
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>segment_to_polyline.rs<|end_file_name|><|fim▁begin|>use super::ToPolyline;
use crate::procedural::Polyline;
use crate::shape::Segment;
use simba::scalar::RealField;<|fim▁hole|>
fn to_polyline(&self, _: ()) -> Polyline<N> {
Polyline::new(vec![self.a, self.b], None)
}
}<|fim▁end|> |
impl<N: RealField> ToPolyline<N> for Segment<N> {
type DiscretizationParameter = (); |
<|file_name|>LogHelper.java<|end_file_name|><|fim▁begin|>package com.ash6390.jarcraft.utility;
import com.ash6390.jarcraft.reference.References;
import cpw.mods.fml.common.FMLLog;
import org.apache.logging.log4j.Level;
public class LogHelper
{
public static void log(Level logLevel, Object object)
{
FMLLog.log(References.NAME, logLevel, String.valueOf(object));
}
public static void all(Object object) { log(Level.ALL, object); }
public static void debug(Object object) { log(Level.DEBUG, object); }
<|fim▁hole|> public static void error(Object object) { log(Level.ERROR, object); }
public static void fatal(Object object) { log(Level.FATAL, object); }
public static void info(Object object) { log(Level.INFO, object); }
public static void off(Object object) { log(Level.OFF, object); }
public static void trace(Object object) { log(Level.TRACE, object); }
public static void warn(Object object) { log(Level.WARN, object); }
}<|fim▁end|> | |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for admin_readonly_model project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
<|fim▁hole|>from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "admin_readonly_model.settings")
application = get_wsgi_application()<|fim▁end|> | |
<|file_name|>Half_Year_End_Analysis.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Thu Jan 03 10:16:39 2013
@author: Grahesh
<|fim▁hole|>import pandas
from qstkutil import DataAccess as da
import numpy as np
import math
import copy
import qstkutil.qsdateutil as du
import datetime as dt
import qstkutil.DataAccess as da
import qstkutil.tsutil as tsu
import qstkstudy.EventProfiler as ep
"""
Accepts a list of symbols along with start and end date
Returns the Event Matrix which is a pandas Datamatrix
Event matrix has the following structure :
|IBM |GOOG|XOM |MSFT| GS | JP |
(d1)|nan |nan | 1 |nan |nan | 1 |
(d2)|nan | 1 |nan |nan |nan |nan |
(d3)| 1 |nan | 1 |nan | 1 |nan |
(d4)|nan | 1 |nan | 1 |nan |nan |
...................................
...................................
Also, d1 = start date
nan = no information about any event.
1 = status bit (positively confirms the event occurrence)
"""
# Get the data from the data store
storename = "NSEData" # get data from our daily prices source
# Available field names: open, close, high, low, close, actual_close, volume
closefield = "close"
volumefield = "volume"
window = 10
def getHalfYearEndDates(timestamps):
newTS=[]
tempYear=timestamps[0].year
flag=1
for x in range(0, len(timestamps)-1):
if(timestamps[x].year==tempYear):
if(timestamps[x].month==4 and flag==1):
newTS.append(timestamps[x-1])
flag=0
if(timestamps[x].month==10):
newTS.append(timestamps[x-1])
tempYear=timestamps[x].year+1
flag=1
return newTS
def findEvents(symbols, startday,endday, marketSymbol,verbose=False):
# Reading the Data for the list of Symbols.
timeofday=dt.timedelta(hours=16)
timestamps = du.getNSEdays(startday,endday,timeofday)
endOfHalfYear=getHalfYearEndDates(timestamps)
dataobj = da.DataAccess('NSEData')
if verbose:
print __name__ + " reading data"
# Reading the Data
close = dataobj.get_data(timestamps, symbols, closefield)
# Completing the Data - Removing the NaN values from the Matrix
close = (close.fillna(method='ffill')).fillna(method='backfill')
# Calculating Daily Returns for the Market
tsu.returnize0(close.values)
# Calculating the Returns of the Stock Relative to the Market
# So if a Stock went up 5% and the Market rised 3%. The the return relative to market is 2%
mktneutDM = close - close[marketSymbol]
np_eventmat = copy.deepcopy(mktneutDM)
for sym in symbols:
for time in timestamps:
np_eventmat[sym][time]=np.NAN
if verbose:
print __name__ + " finding events"
# Generating the Event Matrix
# Event described is : Analyzing half year events for given stocks.
for symbol in symbols:
for i in endOfHalfYear:
np_eventmat[symbol][i] = 1.0 #overwriting by the bit, marking the event
return np_eventmat
#################################################
################ MAIN CODE ######################
#################################################
symbols = np.loadtxt('NSE500port.csv',dtype='S13',comments='#', skiprows=1)
# You might get a message about some files being missing, don't worry about it.
#symbols =['SPY','BFRE','ATCS','RSERF','GDNEF','LAST','ATTUF','JBFCF','CYVA','SPF','XPO','EHECF','TEMO','AOLS','CSNT','REMI','GLRP','AIFLY','BEE','DJRT','CHSTF','AICAF']
#symbols=['NSE','3MINDIA.NS','AARTIIND.NS','ABAN.NS','ABB.NS','ABGSHIP.NS','ABIRLANUV.NS','ACC.NS','ADANIENT.NS','ADANIPORT.NS','ADANIPOWE.NS','ADVANTA.NS','ALLCARGO.NS','AIAENG.NS','AIL.NS','AZKOINDIA.NS']
startday = dt.datetime(2011,1,1)
endday = dt.datetime(2012,1,1)
eventMatrix = findEvents(symbols,startday,endday,marketSymbol='NSE500',verbose=True)
eventMatrix.to_csv('eventmatrix.csv', sep=',')
eventProfiler = ep.EventProfiler(eventMatrix,startday,endday,lookback_days=20,lookforward_days=20,verbose=True)
eventProfiler.study(filename="HalfYearEventStudy.jpg",plotErrorBars=True,plotMarketNeutral=True,plotEvents=False,marketSymbol='NSE500')<|fim▁end|> | """
|
<|file_name|>document_page_history_workflow.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
from openerp import models, fields, api
class DocumentPageHistoryWorkflow(models.Model):
"""Useful to manage edition's workflow on a document."""
_inherit = 'document.page.history'
@api.multi
def page_approval_draft(self):
"""Set a document state as draft and notified the reviewers."""
self.write({'state': 'draft'})
template = self.env.ref(
'document_page_approval.email_template_new_draft_need_approval')
for page in self:
if page.is_parent_approval_required:
template.send_mail(page.id, force_send=True)
return True<|fim▁hole|> @api.multi
def page_approval_approved(self):
"""Set a document state as approve."""
message_obj = self.env['mail.message']
self.write({
'state': 'approved',
'approved_date': datetime.now().strftime(
DEFAULT_SERVER_DATETIME_FORMAT),
'approved_uid': self.env.uid
})
# Notify followers a new version is available
for page_history in self:
subtype = self.env.ref('mail.mt_comment')
message_obj.create(
{'res_id': page_history.page_id.id,
'model': 'document.page',
'subtype_id': subtype.id,
'body': _('New version of the document %s'
' approved.') % page_history.page_id.name
}
)
return True
@api.multi
def _can_user_approve_page(self):
"""Check if a user cas approve the page."""
user = self.env.user
for page in self:
page.can_user_approve_page = page.can_user_approve_this_page(
page.page_id,
user
)
def can_user_approve_this_page(self, page, user):
"""Check if a user can approved the page."""
if page:
res = page.approver_gid in user.groups_id
res = res or self.can_user_approve_this_page(page.parent_id, user)
else:
res = False
return res
@api.multi
def get_approvers_guids(self):
"""Return the approvers group."""
res = {}
for page in self:
res[page.id] = self.get_approvers_guids_for_page(page.page_id)
return res
def get_approvers_guids_for_page(self, page):
"""Return the approvers group for a page."""
if page:
if page.approver_gid:
res = [page.approver_gid.id]
else:
res = []
res.extend(self.get_approvers_guids_for_page(page.parent_id))
else:
res = []
return res
@api.multi
def _get_approvers_email(self):
"""Get the approvers email."""
for page in self:
emails = ''
guids = self.get_approvers_guids()
uids = [i.id for i in self.env['res.users'].search([
('groups_id', 'in', guids[page.id])
])]
users = self.env['res.users'].browse(uids)
for user in users:
if user.email:
emails += user.email
emails += ','
else:
empl = self.env['hr.employee'].search([
('login', '=', user.login)
])
if empl.work_email:
emails += empl.work_email
emails += ','
page.get_approvers_email = emails[:-1]
@api.multi
def _get_page_url(self):
"""Get the page url."""
for page in self:
base_url = self.env['ir.config_parameter'].get_param(
'web.base.url',
default='http://localhost:8069'
)
page.get_page_url = (
'{}/web#db={}&id={}&view_type=form&'
'model=document.page.history').format(
base_url,
self.env.cr.dbname,
page.id
)
state = fields.Selection(
[('draft', 'Draft'), ('approved', 'Approved')],
'Status',
readonly=True
)
approved_date = fields.Datetime("Approved Date")
approved_uid = fields.Many2one(
'res.users',
"Approved By"
)
is_parent_approval_required = fields.Boolean(
related='page_id.is_parent_approval_required',
string="parent approval",
store=False
)
can_user_approve_page = fields.Boolean(
compute=_can_user_approve_page,
string="can user approve this page",
store=False
)
get_approvers_email = fields.Text(
compute=_get_approvers_email,
string="get all approvers email",
store=False
)
get_page_url = fields.Text(
compute=_get_page_url,
string="URL",
store=False
)<|fim▁end|> | |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>try:
from django.apps import AppConfig
except ImportError:
pass
else:
class GargoyleAppConfig(AppConfig):
name = 'gargoyle'
def ready(self):
try:
import nexus<|fim▁hole|> except ImportError:
pass
else:
from gargoyle.nexus_modules import GargoyleModule
nexus.site.register(GargoyleModule, 'gargoyle')<|fim▁end|> | |
<|file_name|>port1.js<|end_file_name|><|fim▁begin|>// port.js
class SingleData {
constructor (port, order, type, value) {
this.port = port
this.order = order
this.type = type
this.value = value
}
}
export let inputVariables = []
export let countVariables = []
// Add a new port
export function Add (countInputPort) {
countInputPort++
inputVariables[countInputPort] = []
countVariables[countInputPort] = 0
$('div#inputPortList').append(
`<div class="list-group-item list-group-item-action" data-toggle="modal"
data-target="#addNewModal${countInputPort}" id="inputPort${countInputPort}">
Port ${countInputPort}</div>`
)
$(`#inputPort${countInputPort}`).click(function () {
portDetail(countInputPort)
})
return true
}
// Show Details of Port
export function portDetail (countInputPort) {
let container = ''
let order = countVariables[countInputPort]
// Show exist variables
for (let variable of inputVariables[countInputPort]) {
container += `<li class="list-group-item list-group-item-action">
<p class="mb-1 float-left text-primary">${variable.order + 1} </p>
<p class="mb-1 float-left variable-type"><label class="variable-type" order="${variable.order}">
${variable.type}</label> </p>
<p class="mb-1 float-left">
<input type="text" class="form-control variable-value" order="${variable.order}" value="${variable.value}">
</p>
</li>`
}
// Show variables list
$('div#modalArea').html(
`<div class="modal fade" id="addNewModal${countInputPort}" tabindex="-1" role="dialog"
aria-labelledby="addNewModalLabel" aria-hidden="true">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="addNewModalLabel">Port ${countInputPort}</h5>
<button class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
<div class="form-row" id="globalDt">
<select class="form-control col-md-7 mx-sm-3 mb-3" id="dt">
<option value="Numeric">Numeric</option>
<option value="Character">Character</option>
<option value="Enumeration">Enumeration</option>
<option value="Boolean">Boolean</option>
<option value="Set">Set</option>
<option value="Sequence">Sequence</option>
<option value="String">String</option>
<option value="Composite">Composite</option>
<option value="Product">Product</option>
<option value="Map">Map</option>
<option value="Union">Union</option>
<option value="Class">Class</option>
</select>
<button class="btn btn-outline-primary col-md-4 mb-3" id="addVariable">Add</button>
</div>
<!-- list of data types -->
<div>
<ul class="list-group" id="variables">
${container}
</ul>
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-primary" id="savePort">Save changes</button>
<button class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>`
)
// Add a new variables
$('button#addVariable').click(function () {
let selectedValue = $('select#dt').val()
console.log(order)
$('ul#variables').append(
`<li class="list-group-item list-group-item-action">
<p class="mb-1 float-left text-primary">${order + 1} </p>
<p class="mb-1 float-left variable-type"><label class="variable-type" order=${order}>
${selectedValue}</label> </p>
<p class="mb-1 float-left">
<input type="text" class="form-control variable-value" order="${order}" placeholder="${selectedValue}">
</p>
</li>`
)
order++
})
// Save port
$('button#savePort').click(function () {
let i
for (i = 0; i < order; i++) {<|fim▁hole|> let type = $(`label.variable-type[order$="${i}"]`).text()
let value = $(`input.variable-value[order$="${i}"]`).val()
// console.log(type + '\n' + value)
inputVariables[countInputPort][i] = new SingleData(countInputPort, i, type, value)
console.log(`saved:
port: ${countInputPort}
order: ${i}
type: ${type}
value: ${value}`)
}
countVariables[countInputPort] = i
console.log('total: ' + countVariables[countInputPort])
})
}
export function Update (id, value) {
let editId = 'div#' + id
$(editId).text(value)
}<|fim▁end|> | |
<|file_name|>DictItemDaoImpl.java<|end_file_name|><|fim▁begin|>package ltf.namerank.dao.fs;
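// Editorial sketch, not part of the original DAO: persistence is assumed to be one JSON
// file per character under <jsonHome>/dict, keyed by item type -- e.g. a hypothetical file
// named "安" holding {"DictItem_Bm8": {...}}. saveDictItem merges the new item into that map
// and rewrites the file; loadItemsByZi reads back only the "DictItem_Bm8" entry.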
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import ltf.namerank.dao.DictItemDao;
import ltf.namerank.entity.DictItem;
import ltf.namerank.entity.DictItem_Bm8;
import ltf.namerank.utils.PathUtils;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static ltf.namerank.utils.FileUtils.file2Str;
import static ltf.namerank.utils.FileUtils.str2File;
/**
* @author ltf
* @since 6/11/16, 5:17 PM
*/
@Component
public class DictItemDaoImpl implements DictItemDao {<|fim▁hole|>
Map<String, DictItem> items = new HashMap<>();
if (f.exists()) {
try {
items = JSON.parseObject(file2Str(f), items.getClass());
} catch (IOException e) {
e.printStackTrace();
}
}
items.put(dictItem.getItemType(), dictItem);
try {
str2File(JSON.toJSONString(items, true), f);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public DictItem loadItemsByZi(String zi) {
File f = new File(PathUtils.getJsonHome() + "/dict", zi);
if (!f.exists()) return null;
List<DictItem> list = new ArrayList<>(2);
Map<String, DictItem_Bm8> items = new HashMap<>();
try {
items = JSON.parseObject(file2Str(f), new TypeReference<Map<String, DictItem_Bm8>>() {
});
return items.get("DictItem_Bm8");
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
}<|fim▁end|> |
@Override
public void saveDictItem(DictItem dictItem) {
File f = new File(PathUtils.getJsonHome() + "/dict", dictItem.getZi()); |
<|file_name|>tfr.py<|end_file_name|><|fim▁begin|>"""A module which implements the time-frequency estimation.
Morlet code inspired by Matlab code from Sheraz Khan & Brainstorm & SPM
"""
# Authors : Alexandre Gramfort <[email protected]>
# Hari Bharadwaj <[email protected]>
# Clement Moutard <[email protected]>
# Jean-Remi King <[email protected]>
#
# License : BSD (3-clause)
from copy import deepcopy
from functools import partial
from math import sqrt
import numpy as np
from scipy import linalg
from scipy.fftpack import fft, ifft
from ..baseline import rescale
from ..parallel import parallel_func
from ..utils import logger, verbose, _time_mask, check_fname, sizeof_fmt
from ..channels.channels import ContainsMixin, UpdateChannelsMixin
from ..channels.layout import _pair_grad_sensors
from ..io.pick import pick_info, pick_types
from ..io.meas_info import Info
from ..utils import SizeMixin
from .multitaper import dpss_windows
from ..viz.utils import figure_nobar, plt_show, _setup_cmap
from ..externals.h5io import write_hdf5, read_hdf5
from ..externals.six import string_types
# Make wavelet
def morlet(sfreq, freqs, n_cycles=7.0, sigma=None, zero_mean=False):
"""Compute Morlet wavelets for the given frequency range.
Parameters
----------
sfreq : float
The sampling Frequency.
freqs : array
frequency range of interest (1 x Frequencies)
n_cycles : float | array of float, defaults to 7.0
Number of cycles. Fixed number or one per frequency.
sigma : float, defaults to None
Controls the width of the wavelet, i.e. its temporal
resolution. If sigma is None, the temporal resolution
adapts with the frequency, as in a standard wavelet transform:
the higher the frequency, the shorter the wavelet.
If sigma is fixed, the temporal resolution is constant,
as in the short-time Fourier transform, and the number
of oscillations increases with the frequency.
zero_mean : bool, defaults to False
Make sure the wavelet has a mean of zero.
Returns
-------
Ws : list of array
The wavelets time series.
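Examples
--------
A minimal usage sketch; the sampling rate and frequencies below are
arbitrary illustrations, and the import path simply follows this
module's location:

>>> from mne.time_frequency.tfr import morlet
>>> Ws = morlet(sfreq=1000., freqs=[10., 20.], n_cycles=7.0)
>>> len(Ws)  # one wavelet per requested frequency
2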
"""
Ws = list()
n_cycles = np.atleast_1d(n_cycles)
if (n_cycles.size != 1) and (n_cycles.size != len(freqs)):
raise ValueError("n_cycles should be fixed or defined for "
"each frequency.")
for k, f in enumerate(freqs):
if len(n_cycles) != 1:
this_n_cycles = n_cycles[k]
else:
this_n_cycles = n_cycles[0]
# fixed or scale-dependent window
if sigma is None:
sigma_t = this_n_cycles / (2.0 * np.pi * f)
else:
sigma_t = this_n_cycles / (2.0 * np.pi * sigma)
# this scaling factor is proportional to (Tallon-Baudry 98):
# (sigma_t*sqrt(pi))^(-1/2);
t = np.arange(0., 5. * sigma_t, 1.0 / sfreq)
t = np.r_[-t[::-1], t[1:]]
oscillation = np.exp(2.0 * 1j * np.pi * f * t)
gaussian_enveloppe = np.exp(-t ** 2 / (2.0 * sigma_t ** 2))
if zero_mean: # to make it zero mean
real_offset = np.exp(- 2 * (np.pi * f * sigma_t) ** 2)
oscillation -= real_offset
W = oscillation * gaussian_enveloppe
W /= sqrt(0.5) * linalg.norm(W.ravel())
Ws.append(W)
return Ws
def _make_dpss(sfreq, freqs, n_cycles=7., time_bandwidth=4.0, zero_mean=False):
"""Compute DPSS tapers for the given frequency range.
Parameters
----------
sfreq : float
The sampling frequency.
freqs : ndarray, shape (n_freqs,)
The frequencies in Hz.
n_cycles : float | ndarray, shape (n_freqs,), defaults to 7.
The number of cycles globally or for each frequency.
time_bandwidth : float, defaults to 4.0
Time x Bandwidth product.
The number of good tapers (low-bias) is chosen automatically based on
this to equal floor(time_bandwidth - 1).
Default is 4.0, giving 3 good tapers.
zero_mean : bool | None, defaults to False
Make sure the wavelet has a mean of zero.
Returns
-------
Ws : list of array
The wavelets time series.
"""
Ws = list()
if time_bandwidth < 2.0:
raise ValueError("time_bandwidth should be >= 2.0 for good tapers")
n_taps = int(np.floor(time_bandwidth - 1))
n_cycles = np.atleast_1d(n_cycles)
if n_cycles.size != 1 and n_cycles.size != len(freqs):
raise ValueError("n_cycles should be fixed or defined for "
"each frequency.")
for m in range(n_taps):
Wm = list()
for k, f in enumerate(freqs):
if len(n_cycles) != 1:
this_n_cycles = n_cycles[k]
else:
this_n_cycles = n_cycles[0]
t_win = this_n_cycles / float(f)
t = np.arange(0., t_win, 1.0 / sfreq)
# Making sure wavelets are centered before tapering
oscillation = np.exp(2.0 * 1j * np.pi * f * (t - t_win / 2.))
# Get dpss tapers
tapers, conc = dpss_windows(t.shape[0], time_bandwidth / 2.,
n_taps)
Wk = oscillation * tapers[m]
if zero_mean: # to make it zero mean
real_offset = Wk.mean()
Wk -= real_offset
Wk /= sqrt(0.5) * linalg.norm(Wk.ravel())
Wm.append(Wk)
Ws.append(Wm)
return Ws
# Low level convolution
def _cwt(X, Ws, mode="same", decim=1, use_fft=True):
"""Compute cwt with fft based convolutions or temporal convolutions.
Parameters
----------
X : array of shape (n_signals, n_times)
The data.
Ws : list of array
Wavelets time series.
mode : {'full', 'valid', 'same'}
See numpy.convolve.
decim : int | slice, defaults to 1
To reduce memory usage, decimation factor after time-frequency
decomposition.
If `int`, returns tfr[..., ::decim].
If `slice`, returns tfr[..., decim].
.. note:: Decimation may create aliasing artifacts.
use_fft : bool, defaults to True
Use the FFT for convolutions or not.
Returns
-------
out : array, shape (n_signals, n_freqs, n_time_decim)
The time-frequency transform of the signals.
"""
if mode not in ['same', 'valid', 'full']:
raise ValueError("`mode` must be 'same', 'valid' or 'full', "
"got %s instead." % mode)
if mode == 'full' and (not use_fft):
# XXX JRK: full wavelet decomposition needs to be implemented
raise ValueError('`full` decomposition with convolution is currently' +
' not supported.')
decim = _check_decim(decim)
X = np.asarray(X)
# Precompute wavelets for given frequency range to save time
n_signals, n_times = X.shape
n_times_out = X[:, decim].shape[1]
n_freqs = len(Ws)
Ws_max_size = max(W.size for W in Ws)
size = n_times + Ws_max_size - 1
# Always use 2**n-sized FFT
fsize = 2 ** int(np.ceil(np.log2(size)))
# precompute FFTs of Ws
if use_fft:
fft_Ws = np.empty((n_freqs, fsize), dtype=np.complex128)
for i, W in enumerate(Ws):
if len(W) > n_times:
raise ValueError('At least one of the wavelets is longer than the '
'signal. Use a longer signal or shorter '
'wavelets.')
if use_fft:
fft_Ws[i] = fft(W, fsize)
# Make generator looping across signals
tfr = np.zeros((n_freqs, n_times_out), dtype=np.complex128)
for x in X:
if use_fft:
fft_x = fft(x, fsize)
# Loop across wavelets
for ii, W in enumerate(Ws):
if use_fft:
ret = ifft(fft_x * fft_Ws[ii])[:n_times + W.size - 1]
else:
ret = np.convolve(x, W, mode=mode)
# Center and decimate decomposition
if mode == "valid":
sz = int(abs(W.size - n_times)) + 1
offset = (n_times - sz) // 2
this_slice = slice(offset // decim.step,
(offset + sz) // decim.step)
if use_fft:
ret = _centered(ret, sz)
tfr[ii, this_slice] = ret[decim]
else:
if use_fft:
ret = _centered(ret, n_times)
tfr[ii, :] = ret[decim]
yield tfr
# Loop of convolution: single trial
def _compute_tfr(epoch_data, frequencies, sfreq=1.0, method='morlet',
n_cycles=7.0, zero_mean=None, time_bandwidth=None,
use_fft=True, decim=1, output='complex', n_jobs=1,
verbose=None):
"""Compute time-frequency transforms.
Parameters
----------
epoch_data : array of shape (n_epochs, n_channels, n_times)
The epochs.
frequencies : array-like of floats, shape (n_freqs)
The frequencies.
sfreq : float | int, defaults to 1.0
Sampling frequency of the data.
method : 'multitaper' | 'morlet', defaults to 'morlet'
The time-frequency method. 'morlet' convolves a Morlet wavelet.
'multitaper' uses Morlet wavelets windowed with multiple DPSS
multitapers.
n_cycles : float | array of float, defaults to 7.0
Number of cycles in the Morlet wavelet. Fixed number
or one per frequency.
zero_mean : bool | None, defaults to None
None means True for method='multitaper' and False for method='morlet'.
If True, make sure the wavelets have a mean of zero.
time_bandwidth : float, defaults to None
If None and method=multitaper, will be set to 4.0 (3 tapers).
Time x (Full) Bandwidth product. Only applies if
method == 'multitaper'. The number of good tapers (low-bias) is
chosen automatically based on this to equal floor(time_bandwidth - 1).
use_fft : bool, defaults to True
Use the FFT for convolutions or not.
decim : int | slice, defaults to 1
To reduce memory usage, decimation factor after time-frequency
decomposition.
If `int`, returns tfr[..., ::decim].
If `slice`, returns tfr[..., decim].
.. note::
Decimation may create aliasing artifacts, yet decimation
is done after the convolutions.
output : str, defaults to 'complex'
* 'complex' : single trial complex.
* 'power' : single trial power.
* 'phase' : single trial phase.
* 'avg_power' : average of single trial power.
* 'itc' : inter-trial coherence.
* 'avg_power_itc' : average of single trial power and inter-trial
coherence across trials.
n_jobs : int, defaults to 1
The number of epochs to process at the same time. The parallelization
is implemented across channels.
verbose : bool, str, int, or None, defaults to None
If not None, override default verbose level (see :func:`mne.verbose`
and :ref:`Logging documentation <tut_logging>` for more).
Returns
-------
out : array
Time frequency transform of epoch_data. If output is in ['complex',
'phase', 'power'], then shape of out is (n_epochs, n_chans, n_freqs,
n_times), else it is (n_chans, n_freqs, n_times). If output is
'avg_power_itc', the real values code for 'avg_power' and the
imaginary values code for the 'itc': out = avg_power + i * itc
"""
# Check data
epoch_data = np.asarray(epoch_data)
if epoch_data.ndim != 3:
raise ValueError('epoch_data must be of shape '
'(n_epochs, n_chans, n_times)')
# Check params
frequencies, sfreq, zero_mean, n_cycles, time_bandwidth, decim = \
_check_tfr_param(frequencies, sfreq, method, zero_mean, n_cycles,
time_bandwidth, use_fft, decim, output)
# Setup wavelet
if method == 'morlet':
W = morlet(sfreq, frequencies, n_cycles=n_cycles, zero_mean=zero_mean)
Ws = [W] # to have same dimensionality as the 'multitaper' case
elif method == 'multitaper':
Ws = _make_dpss(sfreq, frequencies, n_cycles=n_cycles,
time_bandwidth=time_bandwidth, zero_mean=zero_mean)
# Check wavelets
if len(Ws[0][0]) > epoch_data.shape[2]:
raise ValueError('At least one of the wavelets is longer than the '
'signal. Use a longer signal or shorter wavelets.')
# Initialize output
decim = _check_decim(decim)
n_freqs = len(frequencies)
n_epochs, n_chans, n_times = epoch_data[:, :, decim].shape
if output in ('power', 'phase', 'avg_power', 'itc'):
dtype = np.float
elif output in ('complex', 'avg_power_itc'):
# avg_power_itc is stored as power + 1i * itc to keep a
# simple dimensionality
dtype = np.complex
if ('avg_' in output) or ('itc' in output):
out = np.empty((n_chans, n_freqs, n_times), dtype)
else:
out = np.empty((n_chans, n_epochs, n_freqs, n_times), dtype)
# Parallel computation
parallel, my_cwt, _ = parallel_func(_time_frequency_loop, n_jobs)
# Parallelization is applied across channels.
tfrs = parallel(
my_cwt(channel, Ws, output, use_fft, 'same', decim)
for channel in epoch_data.transpose(1, 0, 2))
# FIXME: to avoid overheads we should use np.array_split()
for channel_idx, tfr in enumerate(tfrs):
out[channel_idx] = tfr
if ('avg_' not in output) and ('itc' not in output):
# This is to enforce that the first dimension is for epochs
out = out.transpose(1, 0, 2, 3)
return out
def _check_tfr_param(frequencies, sfreq, method, zero_mean, n_cycles,
time_bandwidth, use_fft, decim, output):
"""Aux. function to _compute_tfr to check the params validity."""
# Check frequencies
if not isinstance(frequencies, (list, np.ndarray)):
raise ValueError('frequencies must be an array-like, got %s '
'instead.' % type(frequencies))
frequencies = np.asarray(frequencies, dtype=float)
if frequencies.ndim != 1:
raise ValueError('frequencies must be of shape (n_freqs,), got %s '
'instead.' % np.array(frequencies.shape))
# Check sfreq
if not isinstance(sfreq, (float, int)):
raise ValueError('sfreq must be a float or an int, got %s '
'instead.' % type(sfreq))
sfreq = float(sfreq)
# Default zero_mean = True if multitaper else False
zero_mean = method == 'multitaper' if zero_mean is None else zero_mean
if not isinstance(zero_mean, bool):
raise ValueError('zero_mean should be of type bool, got %s instead.'
% type(zero_mean))
frequencies = np.asarray(frequencies)
if (method == 'multitaper') and (output == 'phase'):
raise NotImplementedError(
'This function is not optimized to compute the phase using the '
'multitaper method. Use np.angle of the complex output instead.')
# Check n_cycles
if isinstance(n_cycles, (int, float)):
n_cycles = float(n_cycles)
elif isinstance(n_cycles, (list, np.ndarray)):
n_cycles = np.array(n_cycles)
if len(n_cycles) != len(frequencies):
raise ValueError('n_cycles must be a float or an array of length '
'%i frequencies, got %i cycles instead.' %
(len(frequencies), len(n_cycles)))
else:
raise ValueError('n_cycles must be a float or an array, got %s '
'instead.' % type(n_cycles))
# Check time_bandwidth
if (method == 'morlet') and (time_bandwidth is not None):
raise ValueError('time_bandwidth only applies to "multitaper" method.')
elif method == 'multitaper':
time_bandwidth = (4.0 if time_bandwidth is None
else float(time_bandwidth))
# Check use_fft
if not isinstance(use_fft, bool):
raise ValueError('use_fft must be a boolean, got %s '
'instead.' % type(use_fft))
# Check decim
if isinstance(decim, int):
decim = slice(None, None, decim)
if not isinstance(decim, slice):
raise ValueError('decim must be an integer or a slice, '
'got %s instead.' % type(decim))
# Check output
allowed_ouput = ('complex', 'power', 'phase',
'avg_power_itc', 'avg_power', 'itc')
if output not in allowed_ouput:
raise ValueError("Unknown output type. Allowed are %s but "
"got %s." % (allowed_ouput, output))
if method not in ('multitaper', 'morlet'):
raise ValueError('method must be "morlet" or "multitaper", got %s '
'instead.' % type(method))
return frequencies, sfreq, zero_mean, n_cycles, time_bandwidth, decim
def _time_frequency_loop(X, Ws, output, use_fft, mode, decim):
"""Aux. function to _compute_tfr.
Loops time-frequency transform across wavelets and epochs.
Parameters
----------
X : array, shape (n_epochs, n_times)
The epochs data of a single channel.
Ws : list, shape (n_tapers, n_wavelets, n_times)
The wavelets.
output : str
* 'complex' : single trial complex.
* 'power' : single trial power.
* 'phase' : single trial phase.
* 'avg_power' : average of single trial power.
* 'itc' : inter-trial coherence.
* 'avg_power_itc' : average of single trial power and inter-trial
coherence across trials.
use_fft : bool
Use the FFT for convolutions or not.
mode : {'full', 'valid', 'same'}
See numpy.convolve.
decim : slice
The decimation slice: e.g. power[:, decim]
"""
# Set output type
dtype = np.float
if output in ['complex', 'avg_power_itc']:
dtype = np.complex
# Init outputs
decim = _check_decim(decim)
n_epochs, n_times = X[:, decim].shape
n_freqs = len(Ws[0])
if ('avg_' in output) or ('itc' in output):
tfrs = np.zeros((n_freqs, n_times), dtype=dtype)
else:
tfrs = np.zeros((n_epochs, n_freqs, n_times), dtype=dtype)
# Loops across tapers.
for W in Ws:
coefs = _cwt(X, W, mode, decim=decim, use_fft=use_fft)
# Inter-trial phase locking is apparently computed per taper...
if 'itc' in output:
plf = np.zeros((n_freqs, n_times), dtype=np.complex)
# Loop across epochs
for epoch_idx, tfr in enumerate(coefs):
# Transform complex values
if output in ['power', 'avg_power']:
tfr = (tfr * tfr.conj()).real # power
elif output == 'phase':
tfr = np.angle(tfr)
elif output == 'avg_power_itc':
tfr_abs = np.abs(tfr)
plf += tfr / tfr_abs # phase
tfr = tfr_abs ** 2 # power
elif output == 'itc':
plf += tfr / np.abs(tfr) # phase
continue # not need to stack anything else than plf
# Stack or add
if ('avg_' in output) or ('itc' in output):
tfrs += tfr
else:
tfrs[epoch_idx] += tfr
# Compute inter trial coherence
if output == 'avg_power_itc':
tfrs += 1j * np.abs(plf)
elif output == 'itc':
tfrs += np.abs(plf)
# Normalization of average metrics
if ('avg_' in output) or ('itc' in output):
tfrs /= n_epochs
# Normalization by number of taper
tfrs /= len(Ws)
return tfrs
def cwt(X, Ws, use_fft=True, mode='same', decim=1):
"""Compute time freq decomposition with continuous wavelet transform.
Parameters
----------
X : array, shape (n_signals, n_times)
The signals.
Ws : list of array
Wavelets time series.
use_fft : bool
Use FFT for convolutions. Defaults to True.
mode : 'same' | 'valid' | 'full'
Convention for convolution. 'full' is currently not implemented with
`use_fft=False`. Defaults to 'same'.
decim : int | slice
To reduce memory usage, decimation factor after time-frequency
decomposition.
If `int`, returns tfr[..., ::decim].
If `slice`, returns tfr[..., decim].
.. note:: Decimation may create aliasing artifacts.
Defaults to 1.
Returns
-------
tfr : array, shape (n_signals, n_frequencies, n_times)
The time-frequency decompositions.
See Also
--------
mne.time_frequency.tfr_morlet : Compute time-frequency decomposition
with Morlet wavelets
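Examples
--------
A minimal sketch on random data; the shapes and frequencies are
arbitrary illustrations (imports follow this module's location):

>>> import numpy as np
>>> from mne.time_frequency.tfr import morlet, cwt
>>> X = np.random.RandomState(0).randn(2, 1000)  # 2 signals, 1000 samples
>>> Ws = morlet(sfreq=1000., freqs=[20., 40.])
>>> cwt(X, Ws).shape  # (n_signals, n_frequencies, n_times)
(2, 2, 1000)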
"""
decim = _check_decim(decim)
n_signals, n_times = X[:, decim].shape
coefs = _cwt(X, Ws, mode, decim=decim, use_fft=use_fft)
tfrs = np.empty((n_signals, len(Ws), n_times), dtype=np.complex)
for k, tfr in enumerate(coefs):
tfrs[k] = tfr
return tfrs
def _tfr_aux(method, inst, freqs, decim, return_itc, picks, average,
**tfr_params):
"""Help reduce redundancy between tfr_morlet and tfr_multitaper."""
decim = _check_decim(decim)
data = _get_data(inst, return_itc)
info = inst.info
info, data, picks = _prepare_picks(info, data, picks)
data = data[:, picks, :]
if average:
if return_itc:
output = 'avg_power_itc'
else:
output = 'avg_power'
else:
output = 'power'
if return_itc:
raise ValueError('Inter-trial coherence is not supported'
' with average=False')
out = _compute_tfr(data, freqs, info['sfreq'], method=method,
output=output, decim=decim, **tfr_params)
times = inst.times[decim].copy()
if average:
if return_itc:
power, itc = out.real, out.imag
else:
power = out
nave = len(data)
out = AverageTFR(info, power, times, freqs, nave,
method='%s-power' % method)
if return_itc:
out = (out, AverageTFR(info, itc, times, freqs, nave,
method='%s-itc' % method))
else:
power = out
out = EpochsTFR(info, power, times, freqs, method='%s-power' % method)
return out
@verbose
def tfr_morlet(inst, freqs, n_cycles, use_fft=False, return_itc=True, decim=1,
n_jobs=1, picks=None, zero_mean=True, average=True,
verbose=None):
"""Compute Time-Frequency Representation (TFR) using Morlet wavelets.
Parameters
----------
inst : Epochs | Evoked
The epochs or evoked object.
freqs : ndarray, shape (n_freqs,)
The frequencies in Hz.
n_cycles : float | ndarray, shape (n_freqs,)
The number of cycles globally or for each frequency.
use_fft : bool, defaults to False
The fft based convolution or not.
return_itc : bool, defaults to True
Return inter-trial coherence (ITC) as well as averaged power.
Must be ``False`` for evoked data.
decim : int | slice, defaults to 1
To reduce memory usage, decimation factor after time-frequency
decomposition.
If `int`, returns tfr[..., ::decim].
If `slice`, returns tfr[..., decim].
.. note:: Decimation may create aliasing artifacts.
n_jobs : int, defaults to 1
The number of jobs to run in parallel.
picks : array-like of int | None, defaults to None
The indices of the channels to decompose. If None, all available
channels are decomposed.
zero_mean : bool, defaults to True
Make sure the wavelet has a mean of zero.
.. versionadded:: 0.13.0
average : bool, defaults to True
If True average across Epochs.
.. versionadded:: 0.13.0
verbose : bool, str, int, or None, defaults to None
If not None, override default verbose level (see :func:`mne.verbose`
and :ref:`Logging documentation <tut_logging>` for more).
Returns
-------
power : AverageTFR | EpochsTFR
The averaged or single-trial power.
itc : AverageTFR | EpochsTFR
The inter-trial coherence (ITC). Only returned if return_itc
is True.
See Also
--------
mne.time_frequency.tfr_array_morlet
mne.time_frequency.tfr_multitaper
mne.time_frequency.tfr_array_multitaper
mne.time_frequency.tfr_stockwell
mne.time_frequency.tfr_array_stockwell
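Examples
--------
A minimal sketch; ``epochs`` stands for an existing :class:`mne.Epochs`
instance (not constructed here), and the frequency grid is illustrative:

>>> import numpy as np
>>> freqs = np.arange(6., 30., 3.)
>>> power, itc = tfr_morlet(epochs, freqs=freqs, n_cycles=freqs / 2.,
...                         return_itc=True, decim=3)  # doctest: +SKIP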
"""
tfr_params = dict(n_cycles=n_cycles, n_jobs=n_jobs, use_fft=use_fft,
zero_mean=zero_mean)
return _tfr_aux('morlet', inst, freqs, decim, return_itc, picks,
average, **tfr_params)
@verbose
def tfr_array_morlet(epoch_data, sfreq, frequencies, n_cycles=7.0,
zero_mean=False, use_fft=True, decim=1, output='complex',
n_jobs=1, verbose=None):
"""Compute time-frequency transform using Morlet wavelets.
Convolves epoch data with selected Morlet wavelets.
Parameters
----------
epoch_data : array of shape (n_epochs, n_channels, n_times)
The epochs.
sfreq : float | int
Sampling frequency of the data.
frequencies : array-like of floats, shape (n_freqs)
The frequencies.
n_cycles : float | array of float, defaults to 7.0
Number of cycles in the Morlet wavelet. Fixed number or one per
frequency.
zero_mean : bool
If True, make sure the wavelets have a mean of zero. Defaults to False.
use_fft : bool
Use the FFT for convolutions or not. Defaults to True.
decim : int | slice
To reduce memory usage, decimation factor after time-frequency
decomposition. Defaults to 1
If `int`, returns tfr[..., ::decim].
If `slice`, returns tfr[..., decim].
.. note::
Decimation may create aliasing artifacts, yet decimation
is done after the convolutions.
output : str, defaults to 'complex'
* 'complex' : single trial complex.
* 'power' : single trial power.
* 'phase' : single trial phase.
* 'avg_power' : average of single trial power.
* 'itc' : inter-trial coherence.
* 'avg_power_itc' : average of single trial power and inter-trial
coherence across trials.
n_jobs : int
The number of epochs to process at the same time. The parallelization
is implemented across channels. Defaults to 1
verbose : bool, str, int, or None, defaults to None
If not None, override default verbose level (see :func:`mne.verbose`
and :ref:`Logging documentation <tut_logging>` for more).
Returns
-------
out : array
Time frequency transform of epoch_data. If output is in ['complex',
'phase', 'power'], then shape of out is (n_epochs, n_chans, n_freqs,
n_times), else it is (n_chans, n_freqs, n_times). If output is
'avg_power_itc', the real values code for 'avg_power' and the
imaginary values code for the 'itc': out = avg_power + i * itc
See Also
--------
mne.time_frequency.tfr_morlet
mne.time_frequency.tfr_multitaper
mne.time_frequency.tfr_array_multitaper
mne.time_frequency.tfr_stockwell
mne.time_frequency.tfr_array_stockwell
Notes
-----
.. versionadded:: 0.14.0
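Examples
--------
A self-contained sketch on random data; the shapes, sampling rate and
frequencies are arbitrary illustrations:

>>> import numpy as np
>>> from mne.time_frequency import tfr_array_morlet
>>> data = np.random.RandomState(42).randn(5, 3, 1000)  # epochs, chans, times
>>> out = tfr_array_morlet(data, sfreq=1000., frequencies=[20., 40.],
...                        n_cycles=5., output='power')
>>> out.shape  # (n_epochs, n_chans, n_freqs, n_times)
(5, 3, 2, 1000)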
"""
return _compute_tfr(epoch_data=epoch_data, frequencies=frequencies,
sfreq=sfreq, method='morlet', n_cycles=n_cycles,
zero_mean=zero_mean, time_bandwidth=None,
use_fft=use_fft, decim=decim, output=output,
n_jobs=n_jobs, verbose=verbose)
@verbose
def tfr_multitaper(inst, freqs, n_cycles, time_bandwidth=4.0,
use_fft=True, return_itc=True, decim=1,
n_jobs=1, picks=None, average=True, verbose=None):
"""Compute Time-Frequency Representation (TFR) using DPSS tapers.
Parameters
----------
inst : Epochs | Evoked
The epochs or evoked object.
freqs : ndarray, shape (n_freqs,)
The frequencies in Hz.
n_cycles : float | ndarray, shape (n_freqs,)
The number of cycles globally or for each frequency.
The time-window length is thus T = n_cycles / freq.
time_bandwidth : float, optional, defaults to 4.0 (3 good tapers).
Time x (Full) Bandwidth product. Should be >= 2.0.
Choose this along with n_cycles to get desired frequency resolution.
The number of good tapers (least leakage from far away frequencies)
is chosen automatically based on this to floor(time_bandwidth - 1).
E.g., with freq = 20 Hz and n_cycles = 10, we get time = 0.5 s.
If time_bandwidth = 4., then frequency smoothing is (4 / time) = 8 Hz.
use_fft : bool, defaults to True
The fft based convolution or not.
return_itc : bool, defaults to True
Return inter-trial coherence (ITC) as well as averaged (or
single-trial) power.
decim : int | slice, defaults to 1
To reduce memory usage, decimation factor after time-frequency
decomposition.
If `int`, returns tfr[..., ::decim].
If `slice`, returns tfr[..., decim].
.. note:: Decimation may create aliasing artifacts.
n_jobs : int, defaults to 1
The number of jobs to run in parallel.
picks : array-like of int | None, defaults to None
The indices of the channels to decompose. If None, all available
channels are decomposed.
average : bool, defaults to True
If True average across Epochs.
.. versionadded:: 0.13.0
verbose : bool, str, int, or None, defaults to None
If not None, override default verbose level (see :func:`mne.verbose`
and :ref:`Logging documentation <tut_logging>` for more).
Returns
-------
power : AverageTFR | EpochsTFR
The averaged or single-trial power.
itc : AverageTFR | EpochsTFR
The inter-trial coherence (ITC). Only returned if return_itc
is True.
See Also
--------
mne.time_frequency.tfr_array_multitaper
mne.time_frequency.tfr_stockwell
mne.time_frequency.tfr_array_stockwell
mne.time_frequency.tfr_morlet
mne.time_frequency.tfr_array_morlet
Notes
-----
.. versionadded:: 0.9.0
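Examples
--------
A minimal sketch; ``epochs`` stands for an existing :class:`mne.Epochs`
instance (not constructed here), and the parameter values are illustrative:

>>> import numpy as np
>>> freqs = np.arange(5., 50., 5.)
>>> power, itc = tfr_multitaper(epochs, freqs=freqs, n_cycles=freqs / 2.,
...                             time_bandwidth=4.0)  # doctest: +SKIP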
"""
tfr_params = dict(n_cycles=n_cycles, n_jobs=n_jobs, use_fft=use_fft,
zero_mean=True, time_bandwidth=time_bandwidth)
return _tfr_aux('multitaper', inst, freqs, decim, return_itc, picks,
average, **tfr_params)
# TFR(s) class
class _BaseTFR(ContainsMixin, UpdateChannelsMixin, SizeMixin):
"""Base TFR class."""
@property
def data(self):
return self._data
@data.setter
def data(self, data):
self._data = data
@property
def ch_names(self):
"""Channel names."""
return self.info['ch_names']
def crop(self, tmin=None, tmax=None):
"""Crop data to a given time interval in place.
Parameters
----------
tmin : float | None
Start time of selection in seconds.
tmax : float | None
End time of selection in seconds.
Returns
-------
inst : instance of AverageTFR
The modified instance.
"""
mask = _time_mask(self.times, tmin, tmax, sfreq=self.info['sfreq'])
self.times = self.times[mask]
self.data = self.data[..., mask]
return self
def copy(self):
"""Return a copy of the instance."""
return deepcopy(self)
@verbose
def apply_baseline(self, baseline, mode='mean', verbose=None):
"""Baseline correct the data.
Parameters
----------
baseline : tuple or list of length 2
The time interval to apply rescaling / baseline correction.
If None do not apply it. If baseline is (a, b)
the interval is between "a (s)" and "b (s)".
If a is None the beginning of the data is used
and if b is None then b is set to the end of the interval.
If baseline is equal to (None, None) all the time
interval is used.
mode : None | 'ratio' | 'zscore' | 'mean' | 'percent' | 'logratio' | 'zlogratio'
Do baseline correction with ratio (power is divided by mean
power during baseline) or zscore (power is divided by standard
deviation of power during baseline after subtracting the mean,
power = [power - mean(power_baseline)] / std(power_baseline)),
mean simply subtracts the mean power, percent is the same as
applying ratio then mean, logratio is the same as mean but then
rendered in log-scale, zlogratio is the same as zscore but data
is rendered in log-scale first.
If None no baseline correction is applied.
verbose : bool, str, int, or None
If not None, override default verbose level (see :func:`mne.verbose`).
Returns
-------
inst : instance of AverageTFR
The modified instance.
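Examples
--------
An illustrative call; ``power`` stands for an existing ``AverageTFR``
instance, and the baseline window is arbitrary:

>>> power.apply_baseline(baseline=(-0.5, 0.), mode='logratio')  # doctest: +SKIP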
""" # noqa: E501
self.data = rescale(self.data, self.times, baseline, mode,
copy=False)
return self
class AverageTFR(_BaseTFR):
"""Container for Time-Frequency data.
Can for example store induced power at sensor level or inter-trial
coherence.
Parameters
----------
info : Info
The measurement info.
data : ndarray, shape (n_channels, n_freqs, n_times)
The data.
times : ndarray, shape (n_times,)
The time values in seconds.
freqs : ndarray, shape (n_freqs,)
The frequencies in Hz.
nave : int
The number of averaged TFRs.
comment : str | None, defaults to None
Comment on the data, e.g., the experimental condition.
method : str | None, defaults to None
Comment on the method used to compute the data, e.g., morlet wavelet.
verbose : bool, str, int, or None
If not None, override default verbose level (see :func:`mne.verbose`
and :ref:`Logging documentation <tut_logging>` for more).
Attributes
----------
ch_names : list
The names of the channels.
"""
@verbose
def __init__(self, info, data, times, freqs, nave, comment=None,
method=None, verbose=None): # noqa: D102
self.info = info
if data.ndim != 3:
raise ValueError('data should be 3d. Got %d.' % data.ndim)
n_channels, n_freqs, n_times = data.shape
if n_channels != len(info['chs']):
raise ValueError("Number of channels and data size don't match"
" (%d != %d)." % (n_channels, len(info['chs'])))
if n_freqs != len(freqs):
raise ValueError("Number of frequencies and data size don't match"
" (%d != %d)." % (n_freqs, len(freqs)))
if n_times != len(times):
raise ValueError("Number of times and data size don't match"
" (%d != %d)." % (n_times, len(times)))
self.data = data
self.times = np.array(times, dtype=float)
self.freqs = np.array(freqs, dtype=float)
self.nave = nave
self.comment = comment
self.method = method
self.preload = True
@verbose
def plot(self, picks, baseline=None, mode='mean', tmin=None, tmax=None,
fmin=None, fmax=None, vmin=None, vmax=None, cmap='RdBu_r',
dB=False, colorbar=True, show=True, title=None, axes=None,
layout=None, yscale='auto', verbose=None):
"""Plot TFRs as a two-dimensional image(s).
Parameters
----------
picks : array-like of int
The indices of the channels to plot, one figure per channel.
baseline : None (default) or tuple of length 2
The time interval to apply baseline correction.
If None do not apply it. If baseline is (a, b)
the interval is between "a (s)" and "b (s)".
If a is None the beginning of the data is used
and if b is None then b is set to the end of the interval.
If baseline is equal to (None, None) all the time
interval is used.
mode : None | 'ratio' | 'zscore' | 'mean' | 'percent' | 'logratio' | 'zlogratio'
Do baseline correction with ratio (power is divided by mean
power during baseline) or zscore (power is divided by standard
deviation of power during baseline after subtracting the mean,
power = [power - mean(power_baseline)] / std(power_baseline)),
mean simply subtracts the mean power, percent is the same as
applying ratio then mean, logratio is the same as mean but then
rendered in log-scale, zlogratio is the same as zscore but data
is rendered in log-scale first.
If None no baseline correction is applied.
tmin : None | float
The first time instant to display. If None the first time point
available is used.
tmax : None | float
The last time instant to display. If None the last time point
available is used.
fmin : None | float
The first frequency to display. If None the first frequency
available is used.
fmax : None | float
The last frequency to display. If None the last frequency
available is used.
vmin : float | None
The minimum value on the color scale. If vmin is None, the data
minimum value is used.
vmax : float | None
The maximum value on the color scale. If vmax is None, the data
maximum value is used.
cmap : matplotlib colormap | 'interactive' | (colormap, bool)
The colormap to use. If tuple, the first value indicates the
colormap to use and the second value is a boolean defining
interactivity. In interactive mode the colors are adjustable by
clicking and dragging the colorbar with left and right mouse
button. Left mouse button moves the scale up and down and right
mouse button adjusts the range. Hitting space bar resets the range.
Up and down arrows can be used to change the colormap. If
'interactive', translates to ('RdBu_r', True). Defaults to
'RdBu_r'.
.. warning:: Interactive mode works smoothly only for a small
number of images.
dB : bool
If True, 20*log10 is applied to the data to get dB.
colorbar : bool
If true, colorbar will be added to the plot. For user defined axes,
the colorbar cannot be drawn. Defaults to True.
show : bool
Call pyplot.show() at the end.
title : str | None
String for title. Defaults to None (blank/no title).
axes : instance of Axes | list | None
The axes to plot to. If list, the list must be a list of Axes of
the same length as the number of channels. If instance of Axes,
there must be only one channel plotted.
layout : Layout | None
Layout instance specifying sensor positions. Used for interactive
plotting of topographies on rectangle selection. If possible, the
correct layout is inferred from the data.
yscale : 'auto' (default) | 'linear' | 'log'
The scale of y (frequency) axis. 'linear' gives linear y axis,
'log' leads to log-spaced y axis and 'auto' detects if frequencies
are log-spaced and only then sets the y axis to 'log'.
.. versionadded:: 0.14.0
verbose : bool, str, int, or None
If not None, override default verbose level (see :func:`mne.verbose`).
Returns
-------
fig : matplotlib.figure.Figure
The figure containing the TFR image(s).
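Examples
--------
An illustrative call; ``power`` stands for an existing ``AverageTFR``
instance, and the pick and baseline values are arbitrary:

>>> power.plot(picks=[0], baseline=(-0.5, 0.), mode='logratio')  # doctest: +SKIP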
""" # noqa: E501
from ..viz.topo import _imshow_tfr
import matplotlib.pyplot as plt
times, freqs = self.times.copy(), self.freqs.copy()
info = self.info
data = self.data
n_picks = len(picks)
info, data, picks = _prepare_picks(info, data, picks)
data = data[picks]
data, times, freqs, vmin, vmax = \
_preproc_tfr(data, times, freqs, tmin, tmax, fmin, fmax, mode,
baseline, vmin, vmax, dB, info['sfreq'])
tmin, tmax = times[0], times[-1]
if isinstance(axes, plt.Axes):
axes = [axes]
if isinstance(axes, list) or isinstance(axes, np.ndarray):
if len(axes) != n_picks:
raise RuntimeError('There must be an axes for each picked '
'channel.')
cmap = _setup_cmap(cmap)
for idx in range(len(data)):
if axes is None:
fig = plt.figure()
ax = fig.add_subplot(111)
else:
ax = axes[idx]
fig = ax.get_figure()
onselect_callback = partial(self._onselect, baseline=baseline,
mode=mode, layout=layout)
_imshow_tfr(ax, 0, tmin, tmax, vmin, vmax, onselect_callback,
ylim=None, tfr=data[idx: idx + 1], freq=freqs,
x_label='Time (ms)', y_label='Frequency (Hz)',
colorbar=colorbar, cmap=cmap, yscale=yscale)
if title:
fig.suptitle(title)
plt_show(show)
return fig
def _onselect(self, eclick, erelease, baseline, mode, layout):
"""Handle rubber band selector in channel tfr."""
import matplotlib.pyplot as plt
from ..viz import plot_tfr_topomap
if abs(eclick.x - erelease.x) < .1 or abs(eclick.y - erelease.y) < .1:
return
plt.ion() # turn interactive mode on
tmin = round(min(eclick.xdata, erelease.xdata) / 1000., 5) # ms to s
tmax = round(max(eclick.xdata, erelease.xdata) / 1000., 5)
fmin = round(min(eclick.ydata, erelease.ydata), 5) # Hz
fmax = round(max(eclick.ydata, erelease.ydata), 5)
tmin = min(self.times, key=lambda x: abs(x - tmin)) # find closest
tmax = min(self.times, key=lambda x: abs(x - tmax))
fmin = min(self.freqs, key=lambda x: abs(x - fmin))
fmax = min(self.freqs, key=lambda x: abs(x - fmax))
if tmin == tmax or fmin == fmax:
logger.info('The selected area is too small. '
'Select a larger time-frequency window.')
return
types = list()
if 'eeg' in self:
types.append('eeg')
if 'mag' in self:
types.append('mag')
if 'grad' in self:
if len(_pair_grad_sensors(self.info, topomap_coords=False,
raise_error=False)) >= 2:
types.append('grad')
elif len(types) == 0:
return # Don't draw a figure for nothing.
fig = figure_nobar()
fig.suptitle('{0:.2f} s - {1:.2f} s, {2:.2f} Hz - {3:.2f} Hz'.format(
tmin, tmax, fmin, fmax), y=0.04)
for idx, ch_type in enumerate(types):
ax = plt.subplot(1, len(types), idx + 1)
plot_tfr_topomap(self, ch_type=ch_type, tmin=tmin, tmax=tmax,
fmin=fmin, fmax=fmax, layout=layout,
baseline=baseline, mode=mode, cmap=None,
title=ch_type, vmin=None, vmax=None,
axes=ax)
def plot_topo(self, picks=None, baseline=None, mode='mean', tmin=None,
tmax=None, fmin=None, fmax=None, vmin=None, vmax=None,
layout=None, cmap='RdBu_r', title=None, dB=False,
colorbar=True, layout_scale=0.945, show=True,
border='none', fig_facecolor='k', fig_background=None,
font_color='w', yscale='auto'):
"""Plot TFRs in a topography with images.
Parameters
----------
picks : array-like of int | None
The indices of the channels to plot. If None, all available
channels are displayed.
baseline : None (default) or tuple of length 2
The time interval to apply baseline correction.
If None do not apply it. If baseline is (a, b)
the interval is between "a (s)" and "b (s)".
If a is None the beginning of the data is used
and if b is None then b is set to the end of the interval.
If baseline is equal to (None, None) all the time
interval is used.
mode : None | 'ratio' | 'zscore' | 'mean' | 'percent' | 'logratio' | 'zlogratio'
Do baseline correction with ratio (power is divided by mean
power during baseline) or zscore (power is divided by standard
deviation of power during baseline after subtracting the mean,
power = [power - mean(power_baseline)] / std(power_baseline)),
mean simply subtracts the mean power, percent is the same as
applying ratio then mean, logratio is the same as mean but then
rendered in log-scale, zlogratio is the same as zscore but data
is rendered in log-scale first.
If None no baseline correction is applied.
tmin : None | float
The first time instant to display. If None the first time point
available is used.
tmax : None | float
The last time instant to display. If None the last time point
available is used.
fmin : None | float
The first frequency to display. If None the first frequency
available is used.
fmax : None | float
The last frequency to display. If None the last frequency
available is used.
vmin : float | None
The minimum value on the color scale. If vmin is None, the data
minimum value is used.
vmax : float | None
The maximum value on the color scale. If vmax is None, the data
maximum value is used.
layout : Layout | None
Layout instance specifying sensor positions. If possible, the
correct layout is inferred from the data.
cmap : matplotlib colormap | str
The colormap to use. Defaults to 'RdBu_r'.
title : str
Title of the figure.
dB : bool
If True, 20*log10 is applied to the data to get dB.
colorbar : bool
If true, colorbar will be added to the plot
layout_scale : float
Scaling factor for adjusting the relative size of the layout
on the canvas.
show : bool
Call pyplot.show() at the end.
border : str
matplotlib borders style to be used for each sensor plot.
fig_facecolor : str | obj
The figure face color. Defaults to black.
fig_background : None | array
A background image for the figure. This must be a valid input to
`matplotlib.pyplot.imshow`. Defaults to None.
font_color : str | obj
The color of tick labels in the colorbar. Defaults to white.
yscale : 'auto' (default) | 'linear' | 'log'
The scale of y (frequency) axis. 'linear' gives linear y axis,
'log' leads to log-spaced y axis and 'auto' detects if frequencies
are log-spaced and only then sets the y axis to 'log'.
Returns
-------
fig : matplotlib.figure.Figure
The figure containing the topography.
""" # noqa: E501
from ..viz.topo import _imshow_tfr, _plot_topo, _imshow_tfr_unified
from ..viz import add_background_image
times = self.times.copy()
freqs = self.freqs
data = self.data
info = self.info
info, data, picks = _prepare_picks(info, data, picks)
data = data[picks]
data, times, freqs, vmin, vmax = \
_preproc_tfr(data, times, freqs, tmin, tmax, fmin, fmax,
mode, baseline, vmin, vmax, dB, info['sfreq'])
if layout is None:
from mne import find_layout
layout = find_layout(self.info)
onselect_callback = partial(self._onselect, baseline=baseline,
mode=mode, layout=layout)
click_fun = partial(_imshow_tfr, tfr=data, freq=freqs, yscale=yscale,
cmap=(cmap, True), onselect=onselect_callback)
imshow = partial(_imshow_tfr_unified, tfr=data, freq=freqs, cmap=cmap,
onselect=onselect_callback)
fig = _plot_topo(info=info, times=times, show_func=imshow,
click_func=click_fun, layout=layout,
colorbar=colorbar, vmin=vmin, vmax=vmax, cmap=cmap,
layout_scale=layout_scale, title=title, border=border,
x_label='Time (ms)', y_label='Frequency (Hz)',
fig_facecolor=fig_facecolor, font_color=font_color,
unified=True, img=True)
add_background_image(fig, fig_background)
plt_show(show)
return fig
def plot_topomap(self, tmin=None, tmax=None, fmin=None, fmax=None,
ch_type=None, baseline=None, mode='mean',
layout=None, vmin=None, vmax=None, cmap=None,
sensors=True, colorbar=True, unit=None, res=64, size=2,
cbar_fmt='%1.1e', show_names=False, title=None,
axes=None, show=True, outlines='head', head_pos=None):
"""Plot topographic maps of time-frequency intervals of TFR data.
Parameters
----------
tmin : None | float
The first time instant to display. If None the first time point
available is used.
tmax : None | float
The last time instant to display. If None the last time point
available is used.
fmin : None | float
The first frequency to display. If None the first frequency
available is used.
fmax : None | float
The last frequency to display. If None the last frequency
available is used.
ch_type : 'mag' | 'grad' | 'planar1' | 'planar2' | 'eeg' | None
The channel type to plot. For 'grad', the gradiometers are
collected in pairs and the RMS for each pair is plotted.
If None, then first available channel type from order given
above is used. Defaults to None.
baseline : tuple or list of length 2
The time interval to apply rescaling / baseline correction.<|fim▁hole|> the interval is between "a (s)" and "b (s)".
If a is None the beginning of the data is used
and if b is None then b is set to the end of the interval.
If baseline is equal to (None, None) all the time
interval is used.
mode : None | 'ratio' | 'zscore' | 'mean' | 'percent' | 'logratio' | 'zlogratio'
Do baseline correction with ratio (power is divided by mean
power during baseline) or zscore (power is divided by standard
deviation of power during baseline after subtracting the mean,
power = [power - mean(power_baseline)] / std(power_baseline)),
mean simply subtracts the mean power, percent is the same as
applying ratio then mean, logratio is the same as mean but then
rendered in log-scale, zlogratio is the same as zscore but data
is rendered in log-scale first.
If None no baseline correction is applied.
layout : None | Layout
Layout instance specifying sensor positions (does not need to
be specified for Neuromag data). If possible, the correct layout
file is inferred from the data; if no appropriate layout file was
found, the layout is automatically generated from the sensor
locations.
vmin : float | callable | None
The value specifying the lower bound of the color range. If None,
and vmax is None, -vmax is used. Else np.min(data) or in case
data contains only positive values 0. If callable, the output
equals vmin(data). Defaults to None.
vmax : float | callable | None
The value specifying the upper bound of the color range. If None,
the maximum value is used. If callable, the output equals
vmax(data). Defaults to None.
cmap : matplotlib colormap | (colormap, bool) | 'interactive' | None
Colormap to use. If tuple, the first value indicates the colormap
to use and the second value is a boolean defining interactivity. In
interactive mode the colors are adjustable by clicking and dragging
the colorbar with left and right mouse button. Left mouse button
moves the scale up and down and right mouse button adjusts the
range. Hitting space bar resets the range. Up and down arrows can
be used to change the colormap. If None (default), 'Reds' is used
for all positive data, otherwise defaults to 'RdBu_r'. If
'interactive', translates to (None, True).
sensors : bool | str
Add markers for sensor locations to the plot. Accepts matplotlib
plot format string (e.g., 'r+' for red plusses). If True, a circle
will be used (via .add_artist). Defaults to True.
colorbar : bool
Plot a colorbar.
unit : dict | str | None
The unit of the channel type used for colorbar label. If
scale is None the unit is automatically determined.
res : int
The resolution of the topomap image (n pixels along each side).
size : float
Side length per topomap in inches.
cbar_fmt : str
String format for colorbar values.
show_names : bool | callable
If True, show channel names on top of the map. If a callable is
passed, channel names will be formatted using the callable; e.g.,
to delete the prefix 'MEG ' from all channel names, pass the
function lambda x: x.replace('MEG ', ''). If `mask` is not None,
only significant sensors will be shown.
title : str | None
Title. If None (default), no title is displayed.
axes : instance of Axes | None
The axes to plot to. If None the axes is defined automatically.
show : bool
Call pyplot.show() at the end.
outlines : 'head' | 'skirt' | dict | None
The outlines to be drawn. If 'head', the default head scheme will
be drawn. If 'skirt' the head scheme will be drawn, but sensors are
allowed to be plotted outside of the head circle. If dict, each key
refers to a tuple of x and y positions, the values in 'mask_pos'
will serve as image mask, and the 'autoshrink' (bool) field will
trigger automated shrinking of the positions due to points outside
the outline. Alternatively, a matplotlib patch object can be passed
for advanced masking options, either directly or as a function that
returns patches (required for multi-axis plots). If None, nothing
will be drawn. Defaults to 'head'.
head_pos : dict | None
If None (default), the sensors are positioned such that they span
the head circle. If dict, can have entries 'center' (tuple) and
'scale' (tuple) for what the center and scale of the head should be
relative to the electrode locations.
Returns
-------
fig : matplotlib.figure.Figure
The figure containing the topography.
""" # noqa: E501
from ..viz import plot_tfr_topomap
return plot_tfr_topomap(self, tmin=tmin, tmax=tmax, fmin=fmin,
fmax=fmax, ch_type=ch_type, baseline=baseline,
mode=mode, layout=layout, vmin=vmin, vmax=vmax,
cmap=cmap, sensors=sensors, colorbar=colorbar,
unit=unit, res=res, size=size,
cbar_fmt=cbar_fmt, show_names=show_names,
title=title, axes=axes, show=show,
outlines=outlines, head_pos=head_pos)
def _check_compat(self, tfr):
"""Check that self and tfr have the same time-frequency ranges."""
assert np.all(tfr.times == self.times)
assert np.all(tfr.freqs == self.freqs)
def __add__(self, tfr): # noqa: D105
"""Add instances."""
self._check_compat(tfr)
out = self.copy()
out.data += tfr.data
return out
def __iadd__(self, tfr): # noqa: D105
self._check_compat(tfr)
self.data += tfr.data
return self
def __sub__(self, tfr): # noqa: D105
"""Subtract instances."""
self._check_compat(tfr)
out = self.copy()
out.data -= tfr.data
return out
def __isub__(self, tfr): # noqa: D105
self._check_compat(tfr)
self.data -= tfr.data
return self
def __repr__(self): # noqa: D105
s = "time : [%f, %f]" % (self.times[0], self.times[-1])
s += ", freq : [%f, %f]" % (self.freqs[0], self.freqs[-1])
s += ", nave : %d" % self.nave
s += ', channels : %d' % self.data.shape[0]
s += ', ~%s' % (sizeof_fmt(self._size),)
return "<AverageTFR | %s>" % s
def save(self, fname, overwrite=False):
"""Save TFR object to hdf5 file.
Parameters
----------
fname : str
The file name, which should end with -tfr.h5 .
overwrite : bool
If True, overwrite file (if it exists). Defaults to False.
"""
write_tfrs(fname, self, overwrite=overwrite)
class EpochsTFR(_BaseTFR):
"""Container for Time-Frequency data on epochs.
Can for example store induced power at sensor level.
Parameters
----------
info : Info
The measurement info.
data : ndarray, shape (n_epochs, n_channels, n_freqs, n_times)
The data.
times : ndarray, shape (n_times,)
The time values in seconds.
freqs : ndarray, shape (n_freqs,)
The frequencies in Hz.
comment : str | None, defaults to None
Comment on the data, e.g., the experimental condition.
method : str | None, defaults to None
Comment on the method used to compute the data, e.g., morlet wavelet.
verbose : bool, str, int, or None
If not None, override default verbose level (see :func:`mne.verbose`
and :ref:`Logging documentation <tut_logging>` for more).
Attributes
----------
ch_names : list
The names of the channels.
Notes
-----
.. versionadded:: 0.13.0
"""
@verbose
def __init__(self, info, data, times, freqs, comment=None,
method=None, verbose=None): # noqa: D102
self.info = info
if data.ndim != 4:
raise ValueError('data should be 4d. Got %d.' % data.ndim)
n_epochs, n_channels, n_freqs, n_times = data.shape
if n_channels != len(info['chs']):
raise ValueError("Number of channels and data size don't match"
" (%d != %d)." % (n_channels, len(info['chs'])))
if n_freqs != len(freqs):
raise ValueError("Number of frequencies and data size don't match"
" (%d != %d)." % (n_freqs, len(freqs)))
if n_times != len(times):
raise ValueError("Number of times and data size don't match"
" (%d != %d)." % (n_times, len(times)))
self.data = data
self.times = np.array(times, dtype=float)
self.freqs = np.array(freqs, dtype=float)
self.comment = comment
self.method = method
self.preload = True
def __repr__(self): # noqa: D105
s = "time : [%f, %f]" % (self.times[0], self.times[-1])
s += ", freq : [%f, %f]" % (self.freqs[0], self.freqs[-1])
s += ", epochs : %d" % self.data.shape[0]
s += ', channels : %d' % self.data.shape[1]
s += ', ~%s' % (sizeof_fmt(self._size),)
return "<EpochsTFR | %s>" % s
def average(self):
"""Average the data across epochs.
Returns
-------
ave : instance of AverageTFR
The averaged data.
"""
data = np.mean(self.data, axis=0)
return AverageTFR(info=self.info.copy(), data=data,
times=self.times.copy(), freqs=self.freqs.copy(),
nave=self.data.shape[0],
method=self.method)
def combine_tfr(all_tfr, weights='nave'):
"""Merge AverageTFR data by weighted addition.
Create a new AverageTFR instance, using a combination of the supplied
instances as its data. By default, the mean (weighted by trials) is used.
Subtraction can be performed by passing negative weights (e.g., [1, -1]).
Data must have the same channels and the same time instants.
Parameters
----------
all_tfr : list of AverageTFR
The tfr datasets.
weights : list of float | str
The weights to apply to the data of each AverageTFR instance.
Can also be ``'nave'`` to weight according to tfr.nave,
or ``'equal'`` to use equal weighting (each weighted as ``1/N``).
Returns
-------
tfr : AverageTFR
The new TFR data.
Notes
-----
.. versionadded:: 0.11.0
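Examples
--------
An illustrative weighted difference; ``tfr_a`` and ``tfr_b`` stand for
existing ``AverageTFR`` instances with matching channels and times:

>>> diff = combine_tfr([tfr_a, tfr_b], weights=[1, -1])  # doctest: +SKIP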
"""
tfr = all_tfr[0].copy()
if isinstance(weights, string_types):
if weights not in ('nave', 'equal'):
raise ValueError('Weights must be a list of float, or "nave" or '
'"equal"')
if weights == 'nave':
weights = np.array([e.nave for e in all_tfr], float)
weights /= weights.sum()
else: # == 'equal'
weights = [1. / len(all_tfr)] * len(all_tfr)
weights = np.array(weights, float)
if weights.ndim != 1 or weights.size != len(all_tfr):
raise ValueError('Weights must be the same size as all_tfr')
ch_names = tfr.ch_names
for t_ in all_tfr[1:]:
assert t_.ch_names == ch_names, ValueError("%s and %s do not contain "
"the same channels"
% (tfr, t_))
assert np.max(np.abs(t_.times - tfr.times)) < 1e-7, \
ValueError("%s and %s do not contain the same time instants"
% (tfr, t_))
# use union of bad channels
bads = list(set(tfr.info['bads']).union(*(t_.info['bads']
for t_ in all_tfr[1:])))
tfr.info['bads'] = bads
# XXX : should be refactored with combined_evoked function
tfr.data = sum(w * t_.data for w, t_ in zip(weights, all_tfr))
tfr.nave = max(int(1. / sum(w ** 2 / e.nave
for w, e in zip(weights, all_tfr))), 1)
return tfr
# Utils
def _get_data(inst, return_itc):
"""Get data from Epochs or Evoked instance as epochs x ch x time."""
from ..epochs import BaseEpochs
from ..evoked import Evoked
if not isinstance(inst, (BaseEpochs, Evoked)):
raise TypeError('inst must be Epochs or Evoked')
if isinstance(inst, BaseEpochs):
data = inst.get_data()
else:
if return_itc:
raise ValueError('return_itc must be False for evoked data')
data = inst.data[np.newaxis].copy()
return data
def _prepare_picks(info, data, picks):
"""Prepare the picks."""
if picks is None:
picks = pick_types(info, meg=True, eeg=True, ref_meg=False,
exclude='bads')
if np.array_equal(picks, np.arange(len(data))):
picks = slice(None)
else:
info = pick_info(info, picks)
return info, data, picks
def _centered(arr, newsize):
"""Aux Function to center data."""
# Return the center newsize portion of the array.
newsize = np.asarray(newsize)
currsize = np.array(arr.shape)
startind = (currsize - newsize) // 2
endind = startind + newsize
myslice = [slice(startind[k], endind[k]) for k in range(len(endind))]
return arr[tuple(myslice)]
def _preproc_tfr(data, times, freqs, tmin, tmax, fmin, fmax, mode,
baseline, vmin, vmax, dB, sfreq):
"""Aux Function to prepare tfr computation."""
from ..viz.utils import _setup_vmin_vmax
copy = baseline is not None
data = rescale(data, times, baseline, mode, copy=copy)
# crop time
itmin, itmax = None, None
idx = np.where(_time_mask(times, tmin, tmax, sfreq=sfreq))[0]
if tmin is not None:
itmin = idx[0]
if tmax is not None:
itmax = idx[-1] + 1
times = times[itmin:itmax]
# crop freqs
ifmin, ifmax = None, None
idx = np.where(_time_mask(freqs, fmin, fmax, sfreq=sfreq))[0]
if fmin is not None:
ifmin = idx[0]
if fmax is not None:
ifmax = idx[-1] + 1
freqs = freqs[ifmin:ifmax]
# crop data
data = data[:, ifmin:ifmax, itmin:itmax]
times *= 1e3
if dB:
data = 10 * np.log10((data * data.conj()).real)
vmin, vmax = _setup_vmin_vmax(data, vmin, vmax)
return data, times, freqs, vmin, vmax
def _check_decim(decim):
"""Aux function checking the decim parameter."""
if isinstance(decim, int):
decim = slice(None, None, decim)
elif not isinstance(decim, slice):
raise TypeError('`decim` must be int or slice, got %s instead'
% type(decim))
return decim
# i/o
def write_tfrs(fname, tfr, overwrite=False):
"""Write a TFR dataset to hdf5.
Parameters
----------
fname : string
The file name, which should end with -tfr.h5
tfr : AverageTFR instance, or list of AverageTFR instances
The TFR dataset, or list of TFR datasets, to save in one file.
Note. If .comment is None, a name will be generated on the fly,
based on the order in which the TFR objects are passed.
overwrite : bool
If True, overwrite file (if it exists). Defaults to False.
See Also
--------
read_tfrs
Notes
-----
.. versionadded:: 0.9.0
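Examples
--------
An illustrative call; ``power`` stands for an existing ``AverageTFR``
instance and the file name is hypothetical:

>>> write_tfrs('visual-tfr.h5', power, overwrite=True)  # doctest: +SKIP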
"""
out = []
if not isinstance(tfr, (list, tuple)):
tfr = [tfr]
for ii, tfr_ in enumerate(tfr):
comment = ii if tfr_.comment is None else tfr_.comment
out.append(_prepare_write_tfr(tfr_, condition=comment))
write_hdf5(fname, out, overwrite=overwrite, title='mnepython')
def _prepare_write_tfr(tfr, condition):
"""Aux function."""
return (condition, dict(times=tfr.times, freqs=tfr.freqs,
data=tfr.data, info=tfr.info,
nave=tfr.nave, comment=tfr.comment,
method=tfr.method))
def read_tfrs(fname, condition=None):
"""Read TFR datasets from hdf5 file.
Parameters
----------
fname : string
The file name, which should end with -tfr.h5 .
condition : int or str | list of int or str | None
The condition to load. If None, all conditions will be returned.
Defaults to None.
See Also
--------
write_tfrs
Returns
-------
tfrs : list of instances of AverageTFR | instance of AverageTFR
Depending on `condition` either the TFR object or a list of multiple
TFR objects.
Notes
-----
.. versionadded:: 0.9.0
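Examples
--------
An illustrative call; the file name is hypothetical and would typically
come from a previous :func:`write_tfrs` call:

>>> tfrs = read_tfrs('visual-tfr.h5')  # doctest: +SKIP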
"""
check_fname(fname, 'tfr', ('-tfr.h5',))
logger.info('Reading %s ...' % fname)
tfr_data = read_hdf5(fname, title='mnepython')
for k, tfr in tfr_data:
tfr['info'] = Info(tfr['info'])
if condition is not None:
tfr_dict = dict(tfr_data)
if condition not in tfr_dict:
keys = ['%s' % k for k in tfr_dict]
raise ValueError('Cannot find condition ("{0}") in this file. '
'The file contains "{1}"'
.format(condition, " or ".join(keys)))
out = AverageTFR(**tfr_dict[condition])
else:
out = [AverageTFR(**d) for d in list(zip(*tfr_data))[1]]
return out<|fim▁end|> | If None do not apply it. If baseline is (a, b) |
<|file_name|>CamundaConnectorImpl.java<|end_file_name|><|fim▁begin|>/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.model.bpmn.impl.instance.camunda;
import static org.camunda.bpm.model.bpmn.impl.BpmnModelConstants.CAMUNDA_ELEMENT_CONNECTOR;
import static org.camunda.bpm.model.bpmn.impl.BpmnModelConstants.CAMUNDA_NS;
import org.camunda.bpm.model.bpmn.impl.instance.BpmnModelElementInstanceImpl;
import org.camunda.bpm.model.bpmn.instance.camunda.CamundaConnector;
import org.camunda.bpm.model.bpmn.instance.camunda.CamundaConnectorId;
import org.camunda.bpm.model.bpmn.instance.camunda.CamundaInputOutput;
import org.camunda.bpm.model.xml.ModelBuilder;
import org.camunda.bpm.model.xml.impl.instance.ModelTypeInstanceContext;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder.ModelTypeInstanceProvider;
import org.camunda.bpm.model.xml.type.child.ChildElement;
import org.camunda.bpm.model.xml.type.child.SequenceBuilder;
/**
* The BPMN connector camunda extension element
*
* @author Sebastian Menski
*/
public class CamundaConnectorImpl extends BpmnModelElementInstanceImpl implements CamundaConnector {
protected static ChildElement<CamundaConnectorId> camundaConnectorIdChild;
protected static ChildElement<CamundaInputOutput> camundaInputOutputChild;<|fim▁hole|> .instanceProvider(new ModelTypeInstanceProvider<CamundaConnector>() {
public CamundaConnector newInstance(ModelTypeInstanceContext instanceContext) {
return new CamundaConnectorImpl(instanceContext);
}
});
SequenceBuilder sequenceBuilder = typeBuilder.sequence();
camundaConnectorIdChild = sequenceBuilder.element(CamundaConnectorId.class)
.required()
.build();
camundaInputOutputChild = sequenceBuilder.element(CamundaInputOutput.class)
.build();
typeBuilder.build();
}
public CamundaConnectorImpl(ModelTypeInstanceContext instanceContext) {
super(instanceContext);
}
public CamundaConnectorId getCamundaConnectorId() {
return camundaConnectorIdChild.getChild(this);
}
public void setCamundaConnectorId(CamundaConnectorId camundaConnectorId) {
camundaConnectorIdChild.setChild(this, camundaConnectorId);
}
public CamundaInputOutput getCamundaInputOutput() {
return camundaInputOutputChild.getChild(this);
}
public void setCamundaInputOutput(CamundaInputOutput camundaInputOutput) {
camundaInputOutputChild.setChild(this, camundaInputOutput);
}
}<|fim▁end|> |
public static void registerType(ModelBuilder modelBuilder) {
ModelElementTypeBuilder typeBuilder = modelBuilder.defineType(CamundaConnector.class, CAMUNDA_ELEMENT_CONNECTOR)
.namespaceUri(CAMUNDA_NS) |
<|file_name|>rcvr-borrowed-to-slice.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait sum {
fn sum_(self) -> int;
}
// Note: impl on a slice
impl<'a> sum for &'a [int] {
fn sum_(self) -> int {
self.iter().fold(0, |a, &b| a + b)
}
}
fn call_sum(x: &[int]) -> int { x.sum_() }
pub fn main() {
let x = vec!(1, 2, 3);
let y = call_sum(x.as_slice());
println!("y=={}", y);
assert_eq!(y, 6);
let x = vec!(1, 2, 3);
let y = x.as_slice().sum_();
println!("y=={}", y);
assert_eq!(y, 6);
<|fim▁hole|> assert_eq!(y, 6);
}<|fim▁end|> | let x = vec!(1, 2, 3);
let y = x.as_slice().sum_();
println!("y=={}", y); |
<|file_name|>template_functions.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
import json, traceback
from PyQt4.Qt import QDialogButtonBox
from calibre.gui2 import error_dialog, warning_dialog
from calibre.gui2.preferences import ConfigWidgetBase, test_widget
from calibre.gui2.preferences.template_functions_ui import Ui_Form
from calibre.gui2.widgets import PythonHighlighter
from calibre.utils.formatter_functions import (formatter_functions,
compile_user_function, load_user_template_functions)
class ConfigWidget(ConfigWidgetBase, Ui_Form):
def genesis(self, gui):
self.gui = gui
self.db = gui.library_view.model().db
help_text = _('''
<p>Here you can add and remove functions used in template processing. A
template function is written in python. It takes information from the
book, processes it in some way, then returns a string result. Functions
defined here are usable in templates in the same way that builtin
functions are usable. The function must be named <b>evaluate</b>, and
must have the signature shown below.</p>
<p><code>evaluate(self, formatter, kwargs, mi, locals, your parameters)
→ returning a unicode string</code></p>
<p>The parameters of the evaluate function are:
<ul>
<li><b>formatter</b>: the instance of the formatter being used to
evaluate the current template. You can use this to do recursive
template evaluation.</li>
<li><b>kwargs</b>: a dictionary of metadata. Field values are in this
dictionary.
<li><b>mi</b>: a Metadata instance. Used to get field information.
This parameter can be None in some cases, such as when evaluating
non-book templates.</li>
<li><b>locals</b>: the local variables assigned to by the current
template program.</li>
<li><b>your parameters</b>: You must supply one or more formal
parameters. The number must match the arg count box, unless arg count is
-1 (variable number of arguments), in which case the last argument must
be *args. At least one argument is required, and is usually the value of
the field being operated upon. Note that when writing in basic template
mode, the user does not provide this first argument. Instead it is
supplied by the formatter.</li>
</ul></p>
<p>
The following example function checks the value of the field. If the
field is not empty, the field's value is returned, otherwise the value
EMPTY is returned.
<pre>
name: my_ifempty
arg count: 1
doc: my_ifempty(val) -- return val if it is not empty, otherwise the string 'EMPTY'
program code:
def evaluate(self, formatter, kwargs, mi, locals, val):
if val:
return val
else:
return 'EMPTY'</pre>
This function can be called in any of the three template program modes:
<ul>
<li>single-function mode: {tags:my_ifempty()}</li>
<li>template program mode: {tags:'my_ifempty($)'}</li>
<li>general program mode: program: my_ifempty(field('tags'))</li>
</ul></p>
''')
self.textBrowser.setHtml(help_text)
def initialize(self):
try:
self.builtin_source_dict = json.loads(P('template-functions.json', data=True,
allow_user_override=False).decode('utf-8'))
except:
traceback.print_exc()
self.builtin_source_dict = {}
self.funcs = formatter_functions().get_functions()
self.builtins = formatter_functions().get_builtins_and_aliases()
self.build_function_names_box()
self.function_name.currentIndexChanged[str].connect(self.function_index_changed)
self.function_name.editTextChanged.connect(self.function_name_edited)
self.argument_count.valueChanged.connect(self.enable_replace_button)
self.documentation.textChanged.connect(self.enable_replace_button)
self.program.textChanged.connect(self.enable_replace_button)
self.create_button.clicked.connect(self.create_button_clicked)
self.delete_button.clicked.connect(self.delete_button_clicked)
self.create_button.setEnabled(False)
self.delete_button.setEnabled(False)
self.replace_button.setEnabled(False)
self.clear_button.clicked.connect(self.clear_button_clicked)
self.replace_button.clicked.connect(self.replace_button_clicked)
self.program.setTabStopWidth(20)
self.highlighter = PythonHighlighter(self.program.document())
def enable_replace_button(self):
self.replace_button.setEnabled(self.delete_button.isEnabled())
def clear_button_clicked(self):
self.build_function_names_box()
self.program.clear()
self.documentation.clear()
self.argument_count.clear()
self.create_button.setEnabled(False)
self.delete_button.setEnabled(False)
def build_function_names_box(self, scroll_to='', set_to=''):
self.function_name.blockSignals(True)
func_names = sorted(self.funcs)
self.function_name.clear()
self.function_name.addItem('')
self.function_name.addItems(func_names)
self.function_name.setCurrentIndex(0)
if set_to:
self.function_name.setEditText(set_to)
self.create_button.setEnabled(True)
self.function_name.blockSignals(False)
if scroll_to:
idx = self.function_name.findText(scroll_to)
if idx >= 0:
self.function_name.setCurrentIndex(idx)
if scroll_to not in self.builtins:
self.delete_button.setEnabled(True)
def delete_button_clicked(self):
name = unicode(self.function_name.currentText())
if name in self.builtins:
error_dialog(self.gui, _('Template functions'),
_('You cannot delete a built-in function'), show=True)
if name in self.funcs:
del self.funcs[name]
self.changed_signal.emit()
self.create_button.setEnabled(True)
self.delete_button.setEnabled(False)
self.build_function_names_box(set_to=name)
self.program.setReadOnly(False)
else:
error_dialog(self.gui, _('Template functions'),
_('Function not defined'), show=True)
def create_button_clicked(self):
self.changed_signal.emit()
name = unicode(self.function_name.currentText())
if name in self.funcs:
error_dialog(self.gui, _('Template functions'),
_('Name %s already used')%(name,), show=True)
return
if self.argument_count.value() == 0:
box = warning_dialog(self.gui, _('Template functions'),
_('Argument count should be -1 or greater than zero. '
'Setting it to zero means that this function cannot '
'be used in single function mode.'), det_msg = '',<|fim▁hole|> if not box.exec_():
return
try:
prog = unicode(self.program.toPlainText())
cls = compile_user_function(name, unicode(self.documentation.toPlainText()),
self.argument_count.value(), prog)
self.funcs[name] = cls
self.build_function_names_box(scroll_to=name)
except:
error_dialog(self.gui, _('Template functions'),
_('Exception while compiling function'), show=True,
det_msg=traceback.format_exc())
def function_name_edited(self, txt):
self.documentation.setReadOnly(False)
self.argument_count.setReadOnly(False)
self.create_button.setEnabled(True)
self.replace_button.setEnabled(False)
self.program.setReadOnly(False)
def function_index_changed(self, txt):
txt = unicode(txt)
self.create_button.setEnabled(False)
if not txt:
self.argument_count.clear()
self.documentation.clear()
self.documentation.setReadOnly(False)
self.argument_count.setReadOnly(False)
return
func = self.funcs[txt]
self.argument_count.setValue(func.arg_count)
self.documentation.setText(func.doc)
if txt in self.builtins:
if hasattr(func, 'program_text') and func.program_text:
self.program.setPlainText(func.program_text)
elif txt in self.builtin_source_dict:
self.program.setPlainText(self.builtin_source_dict[txt])
else:
self.program.setPlainText(_('function source code not available'))
self.documentation.setReadOnly(True)
self.argument_count.setReadOnly(True)
self.program.setReadOnly(True)
self.delete_button.setEnabled(False)
else:
self.program.setPlainText(func.program_text)
self.delete_button.setEnabled(True)
self.program.setReadOnly(False)
self.replace_button.setEnabled(False)
def replace_button_clicked(self):
self.delete_button_clicked()
self.create_button_clicked()
def refresh_gui(self, gui):
pass
def commit(self):
# formatter_functions().reset_to_builtins()
pref_value = []
for name, cls in self.funcs.iteritems():
if name not in self.builtins:
pref_value.append((cls.name, cls.doc, cls.arg_count, cls.program_text))
self.db.prefs.set('user_template_functions', pref_value)
load_user_template_functions(self.db.library_id, pref_value)
return False
if __name__ == '__main__':
from PyQt4.Qt import QApplication
app = QApplication([])
test_widget('Advanced', 'TemplateFunctions')<|fim▁end|> | show=False)
box.bb.setStandardButtons(box.bb.standardButtons() | QDialogButtonBox.Cancel)
box.det_msg_toggle.setVisible(False) |
<|file_name|>pong.rs<|end_file_name|><|fim▁begin|>use piston::event::*;
use piston::input::Button::Keyboard;
use piston::input::keyboard::Key;
use glutin_window::GlutinWindow as Window;
use piston::window::WindowSettings;
use opengl_graphics::{ GlGraphics, OpenGL };
use game_object::GameObject;
static OPENGL_VERSION: OpenGL = OpenGL::_3_2;
static SIZE: [u32; 2] = [512, 512];
static PADDLE_SIZE: [f64; 2] = [8.0, 32.0];
static PADDLE_ACCEL: f64 = 4000.0;
static PADDLE_FRICTION: f64 = 0.5;
static PADDLE_MAX_SPEED: f64 = 400.0;
static BALL_SIZE: [f64; 2] = [8.0, 8.0];
static BALL_START_MAX_ANGLE: f64 = 60.0;
static BALL_START_SPEED: f64 = 200.0;
static BALL_SPEED_INCREMENT: f64 = 25.0;
struct Pong {
gl: GlGraphics,
p1: GameObject,
p2: GameObject,
ball: GameObject,
up: bool,
down: bool,
server: Player,
p1_score: u32,
p2_score: u32,
}
enum Player {
Left,
Right
}
impl Pong {
fn render(&mut self, args: &RenderArgs) {
use graphics::*;
const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0];
const WHITE: [f32; 4] = [1.0, 1.0, 1.0, 1.0];
let objs = [&self.p1, &self.p2, &self.ball];
self.gl.draw(args.viewport(), |c, gl| {
clear(BLACK, gl);
for obj in objs.iter() {
let rect = [0.0, 0.0, obj.size[0], obj.size[1]];
let transform = c.transform.trans(obj.pos[0], obj.pos[1])
.trans(-obj.size[0] / 2.0, -obj.size[1] / 2.0);
rectangle(WHITE, rect, transform, gl);
}
});
}
fn update(&mut self, args: &UpdateArgs) {
let (ai_up, ai_down) = self.handle_ai_paddle();
Pong::handle_paddle(&mut self.p1, self.up, self.down, args.dt);
Pong::handle_paddle(&mut self.p2, ai_up, ai_down, args.dt);
Pong::handle_game_object(&mut self.p1, args.dt, false);
Pong::handle_game_object(&mut self.p2, args.dt, false);
Pong::handle_game_object(&mut self.ball, args.dt, true);
self.handle_ball();
}
fn start(&mut self) {
self.p1.pos = [self.p1.size[0] / 2.0 + 4.0, (SIZE[1] / 2) as f64];
self.p2.pos = [SIZE[0] as f64 - self.p2.size[0] / 2.0 - 4.0,
(SIZE[1] / 2) as f64];
self.reset();
}
fn reset(&mut self) {
use std::f64::consts::PI;
use rand;
use rand::Rng;
self.ball.pos = [(SIZE[0] / 2) as f64, (SIZE[1] / 2) as f64];
let mut rng = rand::thread_rng();
let max_angle = 2.0 * BALL_START_MAX_ANGLE * PI / 180.0;
let angle = rng.next_f64() * max_angle - max_angle / 2.0;
self.ball.vel = [
angle.cos() * BALL_START_SPEED * self.serve_direction(),
angle.sin() * BALL_START_SPEED
];
}
fn serve_direction(&mut self) -> f64 {
match self.server {
Player::Left => { -1.0 }
Player::Right => { 1.0 }
}
}
fn key_press(&mut self, key: Key) {
match key {
Key::Up => { self.up = true; }
Key::Down => { self.down = true; }
_ => {}
}
}
fn key_release(&mut self, key: Key) {
match key {
Key::Up => { self.up = false; }
Key::Down => { self.down = false; }
_ => {}
}
}
fn handle_game_object(obj: &mut GameObject, dt: f64, bounce: bool) {
obj.pos[0] += obj.vel[0] * dt;
obj.pos[1] += obj.vel[1] * dt;
if obj.pos[1] + obj.size[1] / 2.0 >= SIZE[1] as f64 {
obj.pos[1] = SIZE[1] as f64 - obj.size[1] / 2.0;
if bounce { obj.vel[1] *= -1.0; }
else { obj.vel[1] = 0.0; }
}
if obj.pos[1] - obj.size[1] / 2.0 <= 0.0f64 {
obj.pos[1] = obj.size[1] / 2.0;
if bounce { obj.vel[1] *= -1.0; }
else { obj.vel[1] = 0.0; }
}
}
fn handle_paddle(paddle: &mut GameObject, up: bool, down: bool, dt: f64) {
if up {
paddle.vel[1] -= PADDLE_ACCEL * dt;
} else if down {
paddle.vel[1] += PADDLE_ACCEL * dt;
} else {
let dv = -paddle.vel[1].signum() * PADDLE_ACCEL * dt;
if dv.abs() >= paddle.vel[1].abs() { paddle.vel[1] = 0.0; }
else { paddle.vel[1] += dv; }
}
if paddle.vel[1] > PADDLE_MAX_SPEED {
paddle.vel[1] = PADDLE_MAX_SPEED;
} else if paddle.vel[1] < -PADDLE_MAX_SPEED {
paddle.vel[1] = -PADDLE_MAX_SPEED;
}
}
fn handle_ai_paddle(&self) -> (bool, bool) {
let mut ai_up = false;
let mut ai_down = false;
if self.ball.vel[0] > 0.0 {
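            // Predict where the ball will cross the paddle's x position:
            // time to arrive is dx / vx, then extrapolate y linearly.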
let t = (self.p2.pos[0] - self.ball.pos[0]) / self.ball.vel[0];
let target_y = self.ball.pos[1] + self.ball.vel[1] * t;
if target_y > self.p2.pos[1] { ai_down = true; }
else if target_y < self.p2.pos[1] { ai_up = true; }
}
(ai_up, ai_down)
}
fn handle_ball(&mut self) {
for paddle in [&self.p1, &self.p2].iter() {
match self.ball.collision_normal(paddle) {
Some(normal) => {
let dot = self.ball.vel[0] * normal[0] +
self.ball.vel[1] * normal[1];
// reflect ball's velocity off collision normal
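                    // (specular reflection: v' = v - 2 * (v . n) * n)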
self.ball.vel = [
self.ball.vel[0] - 2.0 * normal[0] * dot,
self.ball.vel[1] - 2.0 * normal[1] * dot
];
// apply a bit of paddle y velocity to ball
if normal[0] != 0.0 {
self.ball.vel[1] += paddle.vel[1] * PADDLE_FRICTION;
}
// increment ball x velocity a bit
self.ball.vel[0] += BALL_SPEED_INCREMENT *
self.ball.vel[0].signum();
Pong::correct_collision(&mut self.ball, paddle, normal);
}
None => {}
}
}
if self.ball.pos[0] > SIZE[0] as f64 { self.score(Player::Left); }
else if self.ball.pos[0] < 0.0 { self.score(Player::Right); }
}
fn correct_collision(a: &mut GameObject, b: &GameObject, normal: [f64; 2]) {
if normal == [1.0, 0.0] {
a.pos[0] = b.pos[0] + b.size[0] / 2.0 + a.size[0] / 2.0;
} else if normal == [-1.0, 0.0] {
a.pos[0] = b.pos[0] - b.size[0] / 2.0 - a.size[0] / 2.0;
} else if normal == [0.0, 1.0] {
a.pos[1] = b.pos[1] + b.size[1] / 2.0 + a.size[1] / 2.0;
} else if normal == [0.0, -1.0] {
a.pos[1] = b.pos[1] - b.size[1] / 2.0 - a.size[1] / 2.0;
}
}
fn score(&mut self, player: Player) {
match player {
Player::Left => {
self.server = Player::Right;
self.p1_score += 1;
println!("Player 1 scored! {}-{}", self.p1_score,
self.p2_score);
}
Player::Right => {
self.server = Player::Left;
self.p2_score += 1;
println!("Player 2 scored! {}-{}", self.p1_score,
self.p2_score);
}
}
self.reset();
}
}
pub fn play() {
let window = Window::new(OPENGL_VERSION,
WindowSettings::new("pong", SIZE)
.exit_on_esc(true));
let mut pong = Pong {
gl: GlGraphics::new(OPENGL_VERSION),
p1: GameObject { size: PADDLE_SIZE, ..Default::default() },
p2: GameObject { size: PADDLE_SIZE, ..Default::default() },
ball: GameObject { size: BALL_SIZE, ..Default::default() },
up: false,
down: false,
server: Player::Left,
p1_score: 0,
p2_score: 0,
};
pong.start();
for e in window.events() {<|fim▁hole|> if let Some(Keyboard(key)) = e.release_args() { pong.key_release(key); }
}
}<|fim▁end|> | if let Some(r) = e.render_args() { pong.render(&r); }
if let Some(u) = e.update_args() { pong.update(&u); }
if let Some(Keyboard(key)) = e.press_args() { pong.key_press(key); } |
<|file_name|>_models_py3.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._dev_spaces_management_client_enums import *
class ContainerHostMapping(msrest.serialization.Model):
"""Container host mapping object specifying the Container host resource ID and its associated Controller resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param container_host_resource_id: ARM ID of the Container Host resource.
:type container_host_resource_id: str
:ivar mapped_controller_resource_id: ARM ID of the mapped Controller resource.
:vartype mapped_controller_resource_id: str
"""
_validation = {
'mapped_controller_resource_id': {'readonly': True},
}
_attribute_map = {
'container_host_resource_id': {'key': 'containerHostResourceId', 'type': 'str'},
'mapped_controller_resource_id': {'key': 'mappedControllerResourceId', 'type': 'str'},
}
def __init__(
self,
*,
container_host_resource_id: Optional[str] = None,
**kwargs
):
super(ContainerHostMapping, self).__init__(**kwargs)
self.container_host_resource_id = container_host_resource_id
self.mapped_controller_resource_id = None
class Resource(msrest.serialization.Model):
"""An Azure resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource Id for the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
class TrackedResource(Resource):
"""The resource model definition for a ARM tracked top level resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource Id for the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:param tags: A set of tags. Tags for the Azure resource.
:type tags: dict[str, str]
:param location: Region where the Azure resource is located.
:type location: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
location: Optional[str] = None,
**kwargs
):
super(TrackedResource, self).__init__(**kwargs)
self.tags = tags
self.location = location
class Controller(TrackedResource):
"""Controller.<|fim▁hole|>
All required parameters must be populated in order to send to Azure.
:ivar id: Fully qualified resource Id for the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:param tags: A set of tags. Tags for the Azure resource.
:type tags: dict[str, str]
:param location: Region where the Azure resource is located.
:type location: str
:param sku: Required. Model representing SKU for Azure Dev Spaces Controller.
:type sku: ~dev_spaces_management_client.models.Sku
:ivar provisioning_state: Provisioning state of the Azure Dev Spaces Controller. Possible
values include: "Succeeded", "Failed", "Canceled", "Updating", "Creating", "Deleting",
"Deleted".
:vartype provisioning_state: str or ~dev_spaces_management_client.models.ProvisioningState
:ivar host_suffix: DNS suffix for public endpoints running in the Azure Dev Spaces Controller.
:vartype host_suffix: str
:ivar data_plane_fqdn: DNS name for accessing DataPlane services.
:vartype data_plane_fqdn: str
:ivar target_container_host_api_server_fqdn: DNS of the target container host's API server.
:vartype target_container_host_api_server_fqdn: str
:param target_container_host_resource_id: Required. Resource ID of the target container host.
:type target_container_host_resource_id: str
:param target_container_host_credentials_base64: Required. Credentials of the target container
host (base64).
:type target_container_host_credentials_base64: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'sku': {'required': True},
'provisioning_state': {'readonly': True},
'host_suffix': {'readonly': True},
'data_plane_fqdn': {'readonly': True},
'target_container_host_api_server_fqdn': {'readonly': True},
'target_container_host_resource_id': {'required': True},
'target_container_host_credentials_base64': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
'sku': {'key': 'sku', 'type': 'Sku'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'host_suffix': {'key': 'properties.hostSuffix', 'type': 'str'},
'data_plane_fqdn': {'key': 'properties.dataPlaneFqdn', 'type': 'str'},
'target_container_host_api_server_fqdn': {'key': 'properties.targetContainerHostApiServerFqdn', 'type': 'str'},
'target_container_host_resource_id': {'key': 'properties.targetContainerHostResourceId', 'type': 'str'},
'target_container_host_credentials_base64': {'key': 'properties.targetContainerHostCredentialsBase64', 'type': 'str'},
}
def __init__(
self,
*,
sku: "Sku",
target_container_host_resource_id: str,
target_container_host_credentials_base64: str,
tags: Optional[Dict[str, str]] = None,
location: Optional[str] = None,
**kwargs
):
super(Controller, self).__init__(tags=tags, location=location, **kwargs)
self.sku = sku
self.provisioning_state = None
self.host_suffix = None
self.data_plane_fqdn = None
self.target_container_host_api_server_fqdn = None
self.target_container_host_resource_id = target_container_host_resource_id
self.target_container_host_credentials_base64 = target_container_host_credentials_base64
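# Usage sketch (resource ID and credentials are placeholders): a minimal payload
# only needs the required fields; 'S1' / 'Standard' are the SKU values listed in
# the Sku model below.
#     controller = Controller(
#         location='eastus',
#         sku=Sku(name='S1', tier='Standard'),
#         target_container_host_resource_id='<ARM ID of the target AKS cluster>',
#         target_container_host_credentials_base64='<base64-encoded kubeconfig>',
#     )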
class ControllerConnectionDetails(msrest.serialization.Model):
"""ControllerConnectionDetails.
:param orchestrator_specific_connection_details: Base class for types that supply values used
to connect to container orchestrators.
:type orchestrator_specific_connection_details:
~dev_spaces_management_client.models.OrchestratorSpecificConnectionDetails
"""
_attribute_map = {
'orchestrator_specific_connection_details': {'key': 'orchestratorSpecificConnectionDetails', 'type': 'OrchestratorSpecificConnectionDetails'},
}
def __init__(
self,
*,
orchestrator_specific_connection_details: Optional["OrchestratorSpecificConnectionDetails"] = None,
**kwargs
):
super(ControllerConnectionDetails, self).__init__(**kwargs)
self.orchestrator_specific_connection_details = orchestrator_specific_connection_details
class ControllerConnectionDetailsList(msrest.serialization.Model):
"""ControllerConnectionDetailsList.
:param connection_details_list: List of Azure Dev Spaces Controller connection details.
:type connection_details_list:
list[~dev_spaces_management_client.models.ControllerConnectionDetails]
"""
_attribute_map = {
'connection_details_list': {'key': 'connectionDetailsList', 'type': '[ControllerConnectionDetails]'},
}
def __init__(
self,
*,
connection_details_list: Optional[List["ControllerConnectionDetails"]] = None,
**kwargs
):
super(ControllerConnectionDetailsList, self).__init__(**kwargs)
self.connection_details_list = connection_details_list
class ControllerList(msrest.serialization.Model):
"""ControllerList.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: List of Azure Dev Spaces Controllers.
:type value: list[~dev_spaces_management_client.models.Controller]
:ivar next_link: The URI that can be used to request the next page for list of Azure Dev Spaces
Controllers.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Controller]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["Controller"]] = None,
**kwargs
):
super(ControllerList, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ControllerUpdateParameters(msrest.serialization.Model):
"""Parameters for updating an Azure Dev Spaces Controller.
:param tags: A set of tags. Tags for the Azure Dev Spaces Controller.
:type tags: dict[str, str]
:param target_container_host_credentials_base64: Credentials of the target container host
(base64).
:type target_container_host_credentials_base64: str
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'target_container_host_credentials_base64': {'key': 'properties.targetContainerHostCredentialsBase64', 'type': 'str'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
target_container_host_credentials_base64: Optional[str] = None,
**kwargs
):
super(ControllerUpdateParameters, self).__init__(**kwargs)
self.tags = tags
self.target_container_host_credentials_base64 = target_container_host_credentials_base64
class DevSpacesErrorResponse(msrest.serialization.Model):
"""Error response indicates that the service is not able to process the incoming request. The reason is provided in the error message.
:param error: The details of the error.
:type error: ~dev_spaces_management_client.models.ErrorDetails
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ErrorDetails'},
}
def __init__(
self,
*,
error: Optional["ErrorDetails"] = None,
**kwargs
):
super(DevSpacesErrorResponse, self).__init__(**kwargs)
self.error = error
class ErrorDetails(msrest.serialization.Model):
"""ErrorDetails.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code: Status code for the error.
:vartype code: str
:ivar message: Error message describing the error in detail.
:vartype message: str
:ivar target: The target of the particular error.
:vartype target: str
"""
_validation = {
'code': {'readonly': True},
'message': {'readonly': True},
'target': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ErrorDetails, self).__init__(**kwargs)
self.code = None
self.message = None
self.target = None
class OrchestratorSpecificConnectionDetails(msrest.serialization.Model):
"""Base class for types that supply values used to connect to container orchestrators.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: KubernetesConnectionDetails.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
    :ivar instance_type: Required. Gets the Instance type. Constant filled by server.
:vartype instance_type: str
"""
_validation = {
'instance_type': {'required': True, 'readonly': True},
}
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
}
_subtype_map = {
'instance_type': {'Kubernetes': 'KubernetesConnectionDetails'}
}
def __init__(
self,
**kwargs
):
super(OrchestratorSpecificConnectionDetails, self).__init__(**kwargs)
self.instance_type = None # type: Optional[str]
class KubernetesConnectionDetails(OrchestratorSpecificConnectionDetails):
"""Contains information used to connect to a Kubernetes cluster.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
    :ivar instance_type: Required. Gets the Instance type. Constant filled by server.
:vartype instance_type: str
:param kube_config: Gets the kubeconfig for the cluster.
:type kube_config: str
"""
_validation = {
'instance_type': {'required': True, 'readonly': True},
}
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
'kube_config': {'key': 'kubeConfig', 'type': 'str'},
}
def __init__(
self,
*,
kube_config: Optional[str] = None,
**kwargs
):
super(KubernetesConnectionDetails, self).__init__(**kwargs)
self.instance_type = 'Kubernetes' # type: str
self.kube_config = kube_config
class ListConnectionDetailsParameters(msrest.serialization.Model):
"""Parameters for listing connection details of an Azure Dev Spaces Controller.
All required parameters must be populated in order to send to Azure.
:param target_container_host_resource_id: Required. Resource ID of the target container host
mapped to the Azure Dev Spaces Controller.
:type target_container_host_resource_id: str
"""
_validation = {
'target_container_host_resource_id': {'required': True},
}
_attribute_map = {
'target_container_host_resource_id': {'key': 'targetContainerHostResourceId', 'type': 'str'},
}
def __init__(
self,
*,
target_container_host_resource_id: str,
**kwargs
):
super(ListConnectionDetailsParameters, self).__init__(**kwargs)
self.target_container_host_resource_id = target_container_host_resource_id
class ResourceProviderOperationDefinition(msrest.serialization.Model):
"""ResourceProviderOperationDefinition.
:param name: Resource provider operation name.
:type name: str
:param display:
:type display: ~dev_spaces_management_client.models.ResourceProviderOperationDisplay
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'ResourceProviderOperationDisplay'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display: Optional["ResourceProviderOperationDisplay"] = None,
**kwargs
):
super(ResourceProviderOperationDefinition, self).__init__(**kwargs)
self.name = name
self.display = display
class ResourceProviderOperationDisplay(msrest.serialization.Model):
"""ResourceProviderOperationDisplay.
:param provider: Name of the resource provider.
:type provider: str
:param resource: Name of the resource type.
:type resource: str
:param operation: Name of the resource provider operation.
:type operation: str
:param description: Description of the resource provider operation.
:type description: str
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
provider: Optional[str] = None,
resource: Optional[str] = None,
operation: Optional[str] = None,
description: Optional[str] = None,
**kwargs
):
super(ResourceProviderOperationDisplay, self).__init__(**kwargs)
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
class ResourceProviderOperationList(msrest.serialization.Model):
"""ResourceProviderOperationList.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: Resource provider operations list.
:type value: list[~dev_spaces_management_client.models.ResourceProviderOperationDefinition]
:ivar next_link: The URI that can be used to request the next page for list of Azure
operations.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceProviderOperationDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["ResourceProviderOperationDefinition"]] = None,
**kwargs
):
super(ResourceProviderOperationList, self).__init__(**kwargs)
self.value = value
self.next_link = None
class Sku(msrest.serialization.Model):
"""Model representing SKU for Azure Dev Spaces Controller.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the SKU for Azure Dev Spaces Controller. Possible values
include: "S1".
:type name: str or ~dev_spaces_management_client.models.SkuName
:param tier: The tier of the SKU for Azure Dev Spaces Controller. Possible values include:
"Standard".
:type tier: str or ~dev_spaces_management_client.models.SkuTier
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
}
def __init__(
self,
*,
name: Union[str, "SkuName"],
tier: Optional[Union[str, "SkuTier"]] = None,
**kwargs
):
super(Sku, self).__init__(**kwargs)
self.name = name
self.tier = tier<|fim▁end|> |
Variables are only populated by the server, and will be ignored when sending a request. |
<|file_name|>encode.rs<|end_file_name|><|fim▁begin|>use std::cmp;
use std::io::{self, Write};
use http::io::AtomicWrite;
/// Encoders to handle different Transfer-Encodings.
#[derive(Debug, Clone)]
pub struct Encoder {
kind: Kind,
}
#[derive(Debug, PartialEq, Clone)]
enum Kind {
/// An Encoder for when Transfer-Encoding includes `chunked`.
Chunked(Chunked),
/// An Encoder for when Content-Length is set.
///
/// Enforces that the body is not longer than the Content-Length header.
Length(u64),
}
impl Encoder {
pub fn chunked() -> Encoder {
Encoder {
kind: Kind::Chunked(Chunked::Init),
}
}
pub fn length(len: u64) -> Encoder {
Encoder {<|fim▁hole|> }
}
pub fn is_eof(&self) -> bool {
match self.kind {
Kind::Length(0) |
Kind::Chunked(Chunked::End) => true,
_ => false
}
}
pub fn encode<W: AtomicWrite>(&mut self, w: &mut W, msg: &[u8]) -> io::Result<usize> {
match self.kind {
Kind::Chunked(ref mut chunked) => {
chunked.encode(w, msg)
},
Kind::Length(ref mut remaining) => {
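                // Fixed-length body: cap each write at `remaining` and count it
                // down so is_eof() reports completion once it reaches zero.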
let n = {
let max = cmp::min(*remaining as usize, msg.len());
trace!("sized write, len = {}", max);
let slice = &msg[..max];
try!(w.write_atomic(&[slice]))
};
if n == 0 {
return Err(io::Error::new(io::ErrorKind::WouldBlock, "would block"));
}
*remaining -= n as u64;
debug!("encoded {} bytes", n);
trace!("encode sized complete, remaining = {}", remaining);
Ok(n)
},
}
}
}
#[derive(Debug, PartialEq, Clone)]
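// State machine for the chunked transfer coding: each chunk is emitted as
// "<size in hex>\r\n<data>\r\n", and an empty write terminates the body with
// "0\r\n\r\n" (see the expected byte strings in the tests below).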
enum Chunked {
Init,
Size(ChunkSize),
SizeCr,
SizeLf,
Body(usize),
BodyCr,
BodyLf,
End,
}
impl Chunked {
fn encode<W: AtomicWrite>(&mut self, w: &mut W, msg: &[u8]) -> io::Result<usize> {
match *self {
Chunked::Init => {
let mut size = ChunkSize {
bytes: [0; CHUNK_SIZE_MAX_BYTES],
pos: 0,
len: 0,
};
trace!("chunked write, size = {:?}", msg.len());
write!(&mut size, "{:X}", msg.len())
.expect("CHUNK_SIZE_MAX_BYTES should fit any usize");
*self = Chunked::Size(size);
}
Chunked::End => return Ok(0),
_ => {}
}
let mut n = {
let pieces = match *self {
Chunked::Init => unreachable!("Chunked::Init should have become Chunked::Size"),
Chunked::Size(ref size) => [
&size.bytes[size.pos.into() .. size.len.into()],
&b"\r\n"[..],
msg,
&b"\r\n"[..],
],
Chunked::SizeCr => [
&b""[..],
&b"\r\n"[..],
msg,
&b"\r\n"[..],
],
Chunked::SizeLf => [
&b""[..],
&b"\n"[..],
msg,
&b"\r\n"[..],
],
Chunked::Body(pos) => [
&b""[..],
&b""[..],
&msg[pos..],
&b"\r\n"[..],
],
Chunked::BodyCr => [
&b""[..],
&b""[..],
&b""[..],
&b"\r\n"[..],
],
Chunked::BodyLf => [
&b""[..],
&b""[..],
&b""[..],
&b"\n"[..],
],
Chunked::End => unreachable!("Chunked::End shouldn't write more")
};
try!(w.write_atomic(&pieces))
};
while n > 0 {
match *self {
Chunked::Init => unreachable!("Chunked::Init should have become Chunked::Size"),
Chunked::Size(mut size) => {
n = size.update(n);
if size.len == 0 {
*self = Chunked::SizeCr;
} else {
*self = Chunked::Size(size);
}
},
Chunked::SizeCr => {
*self = Chunked::SizeLf;
n -= 1;
}
Chunked::SizeLf => {
*self = Chunked::Body(0);
n -= 1;
}
Chunked::Body(pos) => {
let left = msg.len() - pos;
if n >= left {
*self = Chunked::BodyCr;
n -= left;
} else {
*self = Chunked::Body(pos + n);
n = 0;
}
}
Chunked::BodyCr => {
*self = Chunked::BodyLf;
n -= 1;
}
Chunked::BodyLf => {
assert!(n == 1);
*self = if msg.len() == 0 {
Chunked::End
} else {
Chunked::Init
};
n = 0;
},
Chunked::End => unreachable!("Chunked::End shouldn't have any to write")
}
}
match *self {
Chunked::Init |
Chunked::End => Ok(msg.len()),
_ => Err(io::Error::new(io::ErrorKind::WouldBlock, "chunked incomplete"))
}
}
}
#[cfg(target_pointer_width = "32")]
const USIZE_BYTES: usize = 4;
#[cfg(target_pointer_width = "64")]
const USIZE_BYTES: usize = 8;
// each byte will become 2 hex
const CHUNK_SIZE_MAX_BYTES: usize = USIZE_BYTES * 2;
#[derive(Clone, Copy)]
struct ChunkSize {
bytes: [u8; CHUNK_SIZE_MAX_BYTES],
pos: u8,
len: u8,
}
impl ChunkSize {
fn update(&mut self, n: usize) -> usize {
let diff = (self.len - self.pos).into();
if n >= diff {
self.pos = 0;
self.len = 0;
n - diff
} else {
self.pos += n as u8; // just verified it was a small usize
0
}
}
}
impl ::std::fmt::Debug for ChunkSize {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct("ChunkSize")
.field("bytes", &&self.bytes[..self.len.into()])
.field("pos", &self.pos)
.finish()
}
}
impl ::std::cmp::PartialEq for ChunkSize {
fn eq(&self, other: &ChunkSize) -> bool {
self.len == other.len &&
self.pos == other.pos &&
(&self.bytes[..]) == (&other.bytes[..])
}
}
impl io::Write for ChunkSize {
fn write(&mut self, msg: &[u8]) -> io::Result<usize> {
let n = (&mut self.bytes[self.len.into() ..]).write(msg)
.expect("&mut [u8].write() cannot error");
self.len += n as u8; // safe because bytes is never bigger than 256
Ok(n)
}
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::Encoder;
use mock::{AsyncIo, Buf};
#[test]
fn test_chunked_encode_sync() {
let mut dst = Buf::new();
let mut encoder = Encoder::chunked();
encoder.encode(&mut dst, b"foo bar").unwrap();
encoder.encode(&mut dst, b"baz quux herp").unwrap();
encoder.encode(&mut dst, b"").unwrap();
assert_eq!(&dst[..], &b"7\r\nfoo bar\r\nD\r\nbaz quux herp\r\n0\r\n\r\n"[..]);
}
#[test]
fn test_chunked_encode_async() {
let mut dst = AsyncIo::new(Buf::new(), 7);
let mut encoder = Encoder::chunked();
assert!(encoder.encode(&mut dst, b"foo bar").is_err());
dst.block_in(6);
assert_eq!(7, encoder.encode(&mut dst, b"foo bar").unwrap());
dst.block_in(30);
assert_eq!(13, encoder.encode(&mut dst, b"baz quux herp").unwrap());
encoder.encode(&mut dst, b"").unwrap();
assert_eq!(&dst[..], &b"7\r\nfoo bar\r\nD\r\nbaz quux herp\r\n0\r\n\r\n"[..]);
}
#[test]
fn test_sized_encode() {
let mut dst = Buf::new();
let mut encoder = Encoder::length(8);
encoder.encode(&mut dst, b"foo bar").unwrap();
assert_eq!(encoder.encode(&mut dst, b"baz").unwrap(), 1);
assert_eq!(dst, b"foo barb");
}
}<|fim▁end|> | kind: Kind::Length(len), |
<|file_name|>ActivityRotAnimation.java<|end_file_name|><|fim▁begin|>package com.landenlabs.all_flipanimation;
/**
* Copyright (c) 2015 Dennis Lang (LanDen Labs) [email protected]
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
* associated documentation files (the "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
* following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
* NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* @author Dennis Lang (3/21/2015)
* @see http://landenlabs.com
*
*/<|fim▁hole|>
import android.animation.ObjectAnimator;
import android.app.Activity;
import android.graphics.Camera;
import android.graphics.Matrix;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.os.Handler;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.animation.Animation;
import android.view.animation.AnimationSet;
import android.view.animation.LinearInterpolator;
import android.view.animation.Transformation;
import android.widget.CheckBox;
import android.widget.TextView;
/**
* Demonstrate rotating View animation using two rotating animations.
*
* @author Dennis Lang (LanDen Labs)
* @see <a href="http://landenlabs.com/android/index-m.html"> author's web-site </a>
* // http://www.inter-fuser.com/2009/08/android-animations-3d-flip.html
*/
public class ActivityRotAnimation extends Activity {
// ---- Layout ----
View mView1;
View mView2;
View mClickView;
DrawView mDrawView;
TextView mAngle1;
TextView mAngle2;
// ---- Data ----
float mCameraZ = -25;
Flip3dAnimation mRotation1;
Flip3dAnimation mRotation2;
boolean mRotateYaxis = true;
boolean mIsForward = true;
boolean mAutoMode = false;
MediaPlayer mSoundClick;
MediaPlayer mSoundShut;
// ---- Timer ----
private Handler m_handler = new Handler();
private int mDurationMsec = 3000;
private Runnable m_updateElapsedTimeTask = new Runnable() {
public void run() {
animateIt();
m_handler.postDelayed(this, mDurationMsec); // Re-execute after msec
}
};
/**
* Called when the activity is first created.
*/
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.rot_animation);
mView1 = Ui.viewById(this, R.id.view1);
mView2 = Ui.viewById(this, R.id.view2);
// Create a new 3D rotation with the supplied parameter
mRotation1 = new Flip3dAnimation();
mRotation2 = new Flip3dAnimation();
Ui.<TextView>viewById(this, R.id.side_title).setText("Rotating Animation");
setupUI();
}
/**
* Start animation.
*/
public void animateIt() {
ObjectAnimator.ofFloat(mClickView, View.ALPHA, mClickView.getAlpha(), 0).start();
final float end = 90.0f;
if (mIsForward) {
mRotation1.mFromDegrees = 0.0f;
mRotation1.mToDegrees = end;
mRotation2.mFromDegrees = -end;
mRotation2.mToDegrees = 0.0f;
} else {
mRotation1.mFromDegrees = end;
mRotation1.mToDegrees = 0.0f;
mRotation2.mFromDegrees = 0.0f;
mRotation2.mToDegrees = -end;
}
mIsForward = !mIsForward;
if (mRotateYaxis) {
mRotation1.mCenterX = mView1.getWidth();
mRotation1.mCenterY = mView1.getHeight() / 2.0f;
mRotation2.mCenterX = 0.0f;
mRotation2.mCenterY = mView2.getHeight() / 2.0f;
} else {
mRotation1.mCenterY = 0.0f; // mView1.getHeight();
mRotation1.mCenterX = mView1.getWidth() / 2.0f;
mRotation2.mCenterY = mView1.getHeight(); // 0.0f;
mRotation2.mCenterX = mView2.getWidth() / 2.0f;
}
mRotation1.reset(mView1, mDurationMsec, mCameraZ);
mRotation2.reset(mView2, mDurationMsec, mCameraZ);
mRotation2.setAnimationListener(new Animation.AnimationListener() {
@Override public void onAnimationStart(Animation animation) { }
@Override public void onAnimationEnd(Animation animation) {
mSoundShut.start();
}
@Override public void onAnimationRepeat(Animation animation) { }
});
// Run both animations in parallel.
AnimationSet animationSet = new AnimationSet(true);
animationSet.setInterpolator(new LinearInterpolator());
animationSet.addAnimation(mRotation1);
animationSet.addAnimation(mRotation2);
animationSet.start();
}
public class Flip3dAnimation extends Animation {
float mFromDegrees;
float mToDegrees;
float mCenterX = 0;
float mCenterY = 0;
float mCameraZ = -8;
Camera mCamera;
View mView;
public Flip3dAnimation() {
setFillEnabled(true);
setFillAfter(true);
setFillBefore(true);
}
public void reset(View view, int durationMsec, float cameraZ) {
mCameraZ = cameraZ;
setDuration(durationMsec);
view.clearAnimation(); // This is very important to get 2nd..nth run to work.
view.setAnimation(this);
mView = view;
}
@Override
public void initialize(int width, int height, int parentWidth, int parentHeight) {
super.initialize(width, height, parentWidth, parentHeight);
mCamera = new Camera();
}
@Override
protected void applyTransformation(float interpolatedTime, Transformation trans) {
final float fromDegrees = mFromDegrees;
float degrees = fromDegrees + ((mToDegrees - fromDegrees) * interpolatedTime);
final Camera camera = mCamera;
final Matrix matrix = trans.getMatrix();
camera.save();
camera.setLocation(0, 0, mCameraZ);
if (mRotateYaxis)
camera.rotateY(degrees);
else
camera.rotateX(degrees);
camera.getMatrix(matrix);
camera.restore();
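            // Translate before/after so the camera rotation pivots around
            // (mCenterX, mCenterY) instead of the view's top-left corner.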
matrix.preTranslate(-mCenterX, -mCenterY);
matrix.postTranslate(mCenterX, mCenterY);
final float degree3 = degrees;
if (mView == mView1) {
mDrawView.setAngle1(degree3);
mAngle1.setText(String.format("%.0f°", degree3));
} else {
mDrawView.setAngle2(degree3);
mAngle2.setText(String.format("%.0f°", degree3));
}
}
}
/**
* Build User Interface - setup callbacks.
*/
private void setupUI() {
mSoundClick = MediaPlayer.create(this, R.raw.click);
// mSoundClick.setVolume(0.5f, 0.5f);
mSoundShut = MediaPlayer.create(this, R.raw.shut);
// mSoundShut.setVolume(0.3f, 0.3f);
final TextView title = (TextView) this.findViewById(R.id.title);
mClickView = this.findViewById(R.id.click_view);
mClickView.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mSoundClick.start();
animateIt();
}
});
final SlideBar seekspeedSB = new SlideBar(this.findViewById(R.id.seekSpeed), "Delay:");
seekspeedSB.setValueChanged(new SlideBar.ValueChanged() {
@Override
public float onValueChanged(View v, float value) {
mDurationMsec = (int) (value = 100 + value * 100);
title.setText(String.format("Delay:%d CameraZ:%.0f", mDurationMsec, mCameraZ));
return value;
}
});
final SlideBar cameraZpos = new SlideBar(this.findViewById(R.id.cameraZpos), "CamZ:");
cameraZpos.setProgress((int) (mCameraZ / -2 + 50));
cameraZpos.setValueChanged(new SlideBar.ValueChanged() {
@Override
public float onValueChanged(View v, float value) {
mCameraZ = value = (50 - value) * 2.0f;
title.setText(String.format("Delay:%d CameraZ:%.0f", mDurationMsec, mCameraZ));
return value;
}
});
final CheckBox autoFlipCb = Ui.viewById(this, R.id.autoflip);
autoFlipCb.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mAutoMode = ((CheckBox) v).isChecked();
if (mAutoMode) {
m_handler.postDelayed(m_updateElapsedTimeTask, 0);
} else {
m_handler.removeCallbacks(m_updateElapsedTimeTask);
}
}
});
final CheckBox yaxisCb = Ui.viewById(this, R.id.yaxis);
mRotateYaxis = yaxisCb.isChecked();
yaxisCb.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
boolean autoMode = mAutoMode;
if (autoMode)
autoFlipCb.performClick(); // Stop automatic animation.
mRotateYaxis = ((CheckBox) v).isChecked();
if (autoMode)
autoFlipCb.performClick(); // Restart automatic animation.
}
});
mDrawView = Ui.viewById(this, R.id.drawView);
mAngle1 = Ui.viewById(this, R.id.angle1);
mAngle2 = Ui.viewById(this, R.id.angle2);
}
}<|fim▁end|> | |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>import os.path
from django.core.urlresolvers import reverse
from django.template import Context, Template
from django.template.defaultfilters import slugify
from django.test import TestCase
from django.test.client import Client
from radpress.compat import get_user_model
User = get_user_model()
from radpress.models import Article, Page, Tag
from radpress.readers import get_reader
from radpress.settings import CONTEXT_DATA, MORE_TAG
from radpress.templatetags.radpress_tags import radpress_get_url
class RadpressTestCase(TestCase):
fixtures = [os.path.join(os.path.dirname(__file__), 'data.json')]
def setUp(self):
self.client = Client()
# define article
self.article1 = Article.objects.get(pk=1)
# define user
self.user1 = User.objects.get(username='gokmen')
self.user1.set_password('secret')
self.user1.save()
# define second user password
self.user2 = User.objects.get(username='defne')
self.user2.set_password('secret')
self.user2.save()
def render_template(self, template, context):
context = Context(context)
return Template(template).render(context)
class RadpressReaderTestCase(RadpressTestCase):
markup = None
file_path = None
def setUp(self):
# default markup name is reStructuredText
self.reader = get_reader(markup=self.markup)
if self.file_path is not None:
# default content_body, metada
file_path = os.path.join(os.path.dirname(__file__), self.file_path)
content = open(file_path).read()
self.content_body, self.metadata = self.reader(content).read()
def test_check_metadata(self):
self.assertEqual(self.metadata['image'], '1')
self.assertTrue(self.metadata['published'])
self.assertEqual(self.metadata['slug'], 'samuel-l-ipsum')
self.assertEqual(self.metadata['title'], 'Samuel L. Ipsum')
for tag in ['ipsum', 'samuel', 'lorem']:
self.assertIn(tag, self.metadata['tags'])
def test_contents(self):
for article in Article.objects.filter(markup=self.markup):
content_body, metadata = self.reader(article.content).read()
self.assertEqual(article.content_body, content_body)
def test_more_tag(self):<|fim▁hole|>
class BaseTest(RadpressTestCase):
def test_all_published_articles(self):
# check published article count
self.assertEqual(Article.objects.all_published().count(), 1)
# check published page count
self.assertEqual(Page.objects.all_published().count(), 2)
def test_open_private_and_public_article_details(self):
for article in Article.objects.all():
status_code = 200 if article.is_published else 404
response = self.client.get(article.get_absolute_url())
self.assertEqual(response.status_code, status_code)
def test_preview_page(self):
# try to get response with GET method
response = self.client.get(reverse('radpress-preview'))
expected_status_code = 302 # because, login required
self.assertEqual(response.status_code, expected_status_code)
self.client.login(username='gokmen', password='secret')
response = self.client.get(reverse('radpress-preview'))
expected_status_code = 405 # because, view only allows `post` method
self.assertEqual(response.status_code, expected_status_code)
def test_slugs(self):
for article in Article.objects.all():
slug = slugify(article.slug)
self.assertEqual(article.slug, slug)
def test_tags(self):
# checks tag count from fixture
self.assertEqual(Tag.objects.count(), 2)
# create new tag and check slug
tag_name = 'how I met your mother'
tag = Tag.objects.create(name=tag_name)
self.assertEqual(tag.slug, slugify(tag_name))
# add tag to a published article and check count of tags
self.article1.articletag_set.create(tag=tag)
self.assertEqual(self.article1.tags.count(), 1)
# try to filter articles for tags
articles = Article.objects.filter(tags__name=tag_name)
self.assertEqual(articles.count(), 1)
def test_access_not_published_article(self):
"""
        If the user is not authenticated, they cannot access unpublished
        articles and pages.
"""
article = Article.objects.get(slug='i-have-a-dream')
page = Page.objects.get(slug='page-3-not-published')
def get_responses():
response_article = self.client.get(
reverse('radpress-article-detail', args=[article.slug]))
response_page = self.client.get(
reverse('radpress-page-detail', args=[page.slug]))
return response_article, response_page
# if user is not authenticated to site:
response_article, response_page = get_responses()
self.assertEqual(response_article.status_code, 404)
self.assertEqual(response_page.status_code, 404)
# if user is not superuser and not author of the entries:
self.client.login(username=self.user2.username, password='secret')
self.assertFalse(self.user2.is_superuser)
response_article, response_page = get_responses()
self.assertEqual(response_article.status_code, 404)
self.assertEqual(response_page.status_code, 404)
# if user is superuser but not the author of entries:
self.user2.is_superuser = True
self.user2.save()
self.assertTrue(self.user2.is_superuser)
response_article, response_page = get_responses()
self.assertEqual(response_article.status_code, 200)
self.assertEqual(response_page.status_code, 200)
# if user is not superuser but the author of entries:
article.author = self.user2
article.save()
self.user2.is_superuser = False
self.user2.save()
self.assertFalse(self.user2.is_superuser)
response_article, response_page = get_responses()
self.assertEqual(response_article.status_code, 200)
self.assertEqual(response_page.status_code, 404)
def test_context_data(self):
"""
Important! All context data keys should be start with `RADPRESS_`
prefix and uppercase.
"""
for context in CONTEXT_DATA.keys():
self.assertTrue(context.startswith('RADPRESS_'))
self.assertEqual(context, context.upper())
def test_radpress_get_url_tag(self):
response = self.client.get(reverse('radpress-article-list'))
self.assertIn('DOMAIN', response.context_data)
context = response.context_data
for article in Article.objects.all():
article_url = context['DOMAIN'] + article.get_absolute_url()
expected_url = radpress_get_url(context, article)
self.assertEqual(article_url, expected_url)
class RestructuredtextTest(RadpressReaderTestCase):
markup = 'restructuredtext'
file_path = 'test_content.rst'
def test_pygmentize(self):
self.assertIn('<table class="highlighttable">', self.content_body)
self.assertIn('<td class="linenos">', self.content_body)<|fim▁end|> | self.assertIn(MORE_TAG, self.content_body)
|
<|file_name|>inferno-router.js<|end_file_name|><|fim▁begin|>/*!
* inferno-router v0.7.14
* (c) 2016 Dominic Gannaway
* Released under the MIT License.
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global.InfernoRouter = factory());
}(this, function () { 'use strict';
var NO_RENDER = 'NO_RENDER';
// Runs only once in applications lifetime
var isBrowser = typeof window !== 'undefined' && window.document;
function isArray(obj) {
return obj instanceof Array;
}
function isNullOrUndefined(obj) {
return isUndefined(obj) || isNull(obj);
}
function isNull(obj) {
return obj === null;
}
function isUndefined(obj) {
return obj === undefined;
}
function VNode(blueprint) {
this.bp = blueprint;
this.dom = null;
this.instance = null;
this.tag = null;
this.children = null;
this.style = null;
this.className = null;
this.attrs = null;
this.events = null;
this.hooks = null;
this.key = null;
this.clipData = null;
}
VNode.prototype = {
setAttrs: function setAttrs(attrs) {
this.attrs = attrs;
return this;
},
setTag: function setTag(tag) {
this.tag = tag;
return this;
},
setStyle: function setStyle(style) {
this.style = style;
return this;
},
setClassName: function setClassName(className) {
this.className = className;
return this;
},
setChildren: function setChildren(children) {
this.children = children;
return this;
},
setHooks: function setHooks(hooks) {
this.hooks = hooks;
return this;
},
setEvents: function setEvents(events) {
this.events = events;
return this;
},
setKey: function setKey(key) {
this.key = key;
return this;
}
};
function createVNode(bp) {
return new VNode(bp);
}
function VPlaceholder() {
this.placeholder = true;
this.dom = null;
}
function createVPlaceholder() {
return new VPlaceholder();
}
function constructDefaults(string, object, value) {
/* eslint no-return-assign: 0 */
string.split(',').forEach(function (i) { return object[i] = value; });
}
var xlinkNS = 'http://www.w3.org/1999/xlink';
var xmlNS = 'http://www.w3.org/XML/1998/namespace';
var strictProps = {};
var booleanProps = {};
var namespaces = {};
var isUnitlessNumber = {};
constructDefaults('xlink:href,xlink:arcrole,xlink:actuate,xlink:role,xlink:title,xlink:type', namespaces, xlinkNS);
constructDefaults('xml:base,xml:lang,xml:space', namespaces, xmlNS);
constructDefaults('volume,value', strictProps, true);
constructDefaults('muted,scoped,loop,open,checked,default,capture,disabled,selected,readonly,multiple,required,autoplay,controls,seamless,reversed,allowfullscreen,novalidate', booleanProps, true);
constructDefaults('animationIterationCount,borderImageOutset,borderImageSlice,borderImageWidth,boxFlex,boxFlexGroup,boxOrdinalGroup,columnCount,flex,flexGrow,flexPositive,flexShrink,flexNegative,flexOrder,gridRow,gridColumn,fontWeight,lineClamp,lineHeight,opacity,order,orphans,tabSize,widows,zIndex,zoom,fillOpacity,floodOpacity,stopOpacity,strokeDasharray,strokeDashoffset,strokeMiterlimit,strokeOpacity,strokeWidth,', isUnitlessNumber, true);
var screenWidth = isBrowser && window.screen.width;
var screenHeight = isBrowser && window.screen.height;
var scrollX = 0;
var scrollY = 0;
var lastScrollTime = 0;
if (isBrowser) {
window.onscroll = function () {
scrollX = window.scrollX;
scrollY = window.scrollY;
lastScrollTime = performance.now();
};
window.resize = function () {
scrollX = window.scrollX;
scrollY = window.scrollY;
screenWidth = window.screen.width;
screenHeight = window.screen.height;
lastScrollTime = performance.now();
};
}
function Lifecycle() {
this._listeners = [];
this.scrollX = null;
this.scrollY = null;
this.screenHeight = screenHeight;
this.screenWidth = screenWidth;
}
Lifecycle.prototype = {
refresh: function refresh() {
this.scrollX = isBrowser && window.scrollX;
this.scrollY = isBrowser && window.scrollY;
},
addListener: function addListener(callback) {
this._listeners.push(callback);
},
trigger: function trigger() {
var this$1 = this;
for (var i = 0; i < this._listeners.length; i++) {
this$1._listeners[i]();
}
}
};
var noOp = 'Inferno Error: Can only update a mounted or mounting component. This usually means you called setState() or forceUpdate() on an unmounted component. This is a no-op.';
// Copy of the util from dom/util, otherwise it makes massive bundles
function getActiveNode() {
return document.activeElement;
}
// Copy of the util from dom/util, otherwise it makes massive bundles
function resetActiveNode(activeNode) {
if (activeNode !== document.body && document.activeElement !== activeNode) {
activeNode.focus(); // TODO: verify are we doing new focus event, if user has focus listener this might trigger it
}
}
function queueStateChanges(component, newState, callback) {
for (var stateKey in newState) {
component._pendingState[stateKey] = newState[stateKey];
}
if (!component._pendingSetState) {
component._pendingSetState = true;
applyState(component, false, callback);
} else {
var pendingState = component._pendingState;
var oldState = component.state;
component.state = Object.assign({}, oldState, pendingState);
component._pendingState = {};
}
}
function applyState(component, force, callback) {
if (!component._deferSetState || force) {
component._pendingSetState = false;
var pendingState = component._pendingState;
var oldState = component.state;
var nextState = Object.assign({}, oldState, pendingState);
component._pendingState = {};
var nextNode = component._updateComponent(oldState, nextState, component.props, component.props, force);
if (nextNode === NO_RENDER) {
nextNode = component._lastNode;
} else if (isNullOrUndefined(nextNode)) {
nextNode = createVPlaceholder();
}
var lastNode = component._lastNode;
var parentDom = lastNode.dom.parentNode;
var activeNode = getActiveNode();
var subLifecycle = new Lifecycle();
component._patch(lastNode, nextNode, parentDom, subLifecycle, component.context, component, null);
component._lastNode = nextNode;
component._componentToDOMNodeMap.set(component, nextNode.dom);
component._parentNode.dom = nextNode.dom;
subLifecycle.trigger();
if (!isNullOrUndefined(callback)) {
callback();
}
resetActiveNode(activeNode);
}
}
var Component = function Component(props) {
/** @type {object} */
this.props = props || {};
/** @type {object} */
this.state = {};
/** @type {object} */
this.refs = {};
this._blockSetState = false;
this._deferSetState = false;
this._pendingSetState = false;
this._pendingState = {};
this._parentNode = null;
this._lastNode = null;
this._unmounted = true;
this.context = {};
this._patch = null;
this._parentComponent = null;
this._componentToDOMNodeMap = null;
};
Component.prototype.render = function render () {
};
Component.prototype.forceUpdate = function forceUpdate (callback) {
if (this._unmounted) {
throw Error(noOp);
}
applyState(this, true, callback);
};
Component.prototype.setState = function setState (newState, callback) {
if (this._unmounted) {
throw Error(noOp);
}
if (this._blockSetState === false) {
queueStateChanges(this, newState, callback);
} else {
throw Error('Inferno Warning: Cannot update state via setState() in componentWillUpdate()');
}
};
Component.prototype.componentDidMount = function componentDidMount () {
};
Component.prototype.componentWillMount = function componentWillMount () {
};
Component.prototype.componentWillUnmount = function componentWillUnmount () {
};
Component.prototype.componentDidUpdate = function componentDidUpdate () {
};
Component.prototype.shouldComponentUpdate = function shouldComponentUpdate () {
return true;
};
Component.prototype.componentWillReceiveProps = function componentWillReceiveProps () {
};
Component.prototype.componentWillUpdate = function componentWillUpdate () {
};
Component.prototype.getChildContext = function getChildContext () {
};
Component.prototype._updateComponent = function _updateComponent (prevState, nextState, prevProps, nextProps, force) {
if (this._unmounted === true) {
this._unmounted = false;
return false;
}
if (!isNullOrUndefined(nextProps) && isNullOrUndefined(nextProps.children)) {
nextProps.children = prevProps.children;
}
if (prevProps !== nextProps || prevState !== nextState || force) {
if (prevProps !== nextProps) {
this._blockSetState = true;
this.componentWillReceiveProps(nextProps);
this._blockSetState = false;
}
var shouldUpdate = this.shouldComponentUpdate(nextProps, nextState);
if (shouldUpdate !== false) {
this._blockSetState = true;
this.componentWillUpdate(nextProps, nextState);
this._blockSetState = false;
this.props = nextProps;
this.state = nextState;
var node = this.render();
this.componentDidUpdate(prevProps, prevState);
return node;
}
}
return NO_RENDER;
};
var ASYNC_STATUS = {
pending: 'pending',
fulfilled: 'fulfilled',
rejected: 'rejected'
};
var Route = (function (Component) {
function Route(props) {
Component.call(this, props);
this.state = {
async: null
};
}
if ( Component ) Route.__proto__ = Component;
Route.prototype = Object.create( Component && Component.prototype );
Route.prototype.constructor = Route;
Route.prototype.async = function async () {
var this$1 = this;
var async = this.props.async;
if (async) {
this.setState({
async: { status: ASYNC_STATUS.pending }
});
async(this.props.params).then(function (value) {
this$1.setState({
async: {
status: ASYNC_STATUS.fulfilled,
value: value
}
});
}, this.reject).catch(this.reject);
}
};
Route.prototype.reject = function reject (value) {
this.setState({
async: {
status: ASYNC_STATUS.rejected,
value: value
}
});
};
Route.prototype.componentWillReceiveProps = function componentWillReceiveProps () {
this.async();
};
Route.prototype.componentWillMount = function componentWillMount () {
this.async();
};
Route.prototype.render = function render () {
var ref = this.props;
var component = ref.component;
var params = ref.params;
return createVNode().setTag(component).setAttrs({ params: params, async: this.state.async });
};
return Route;
}(Component));
var EMPTY$1 = {};
function segmentize(url) {
return strip(url).split('/');
}
function strip(url) {
return url.replace(/(^\/+|\/+$)/g, '');
}
function convertToHashbang(url) {
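// Illustrative note (added for clarity, not part of the original source):
// convertToHashbang('/#!/user/42') returns '/user/42', while a URL without
// a '#' (e.g. '/user/42') is normalized to '/'.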
if (url.indexOf('#') === -1) {
url = '/';
} else {
var splitHashUrl = url.split('#!');
splitHashUrl.shift();
url = splitHashUrl.join('');
}
return url;
}
// Thanks goes to Preact for this function: https://github.com/developit/preact-router/blob/master/src/util.js#L4
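// Illustrative usage note (added for clarity, not part of the original source):
// exec('/user/42?tab=posts', '/user/:id') returns { tab: 'posts', id: '42' },
// while a non-matching URL such as exec('/settings', '/user/:id') returns false.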
function exec(url, route, opts) {
if ( opts === void 0 ) opts = EMPTY$1;
var reg = /(?:\?([^#]*))?(#.*)?$/,
c = url.match(reg),
matches = {},
ret;
if (c && c[1]) {
var p = c[1].split('&');
for (var i = 0; i < p.length; i++) {
var r = p[i].split('=');
matches[decodeURIComponent(r[0])] = decodeURIComponent(r.slice(1).join('='));
}
}
url = segmentize(url.replace(reg, ''));
route = segmentize(route || '');
var max = Math.max(url.length, route.length);
var hasWildcard = false;
for (var i$1 = 0; i$1 < max; i$1++) {
if (route[i$1] && route[i$1].charAt(0) === ':') {
var param = route[i$1].replace(/(^\:|[+*?]+$)/g, ''),
flags = (route[i$1].match(/[+*?]+$/) || EMPTY$1)[0] || '',
plus = ~flags.indexOf('+'),
star = ~flags.indexOf('*'),
val = url[i$1] || '';
if (!val && !star && (flags.indexOf('?') < 0 || plus)) {
ret = false;
break;
}
matches[param] = decodeURIComponent(val);
if (plus || star) {
matches[param] = url.slice(i$1).map(decodeURIComponent).join('/');<|fim▁hole|> else if (route[i$1] !== url[i$1] && !hasWildcard) {
if (route[i$1] === '*' && route.length === i$1 + 1) {
hasWildcard = true;
} else {
ret = false;
break;
}
}
}
if (opts.default !== true && ret === false) {
return false;
}
return matches;
}
function pathRankSort(a, b) {
var aAttr = a.attrs || EMPTY$1,
bAttr = b.attrs || EMPTY$1;
var diff = rank(bAttr.path) - rank(aAttr.path);
return diff || (bAttr.path.length - aAttr.path.length);
}
function rank(url) {
return (strip(url).match(/\/+/g) || '').length;
}
var Router = (function (Component) {
function Router(props) {
Component.call(this, props);
if (!props.history) {
throw new Error('Inferno Error: "inferno-router" Router components require a "history" prop passed.');
}
this._didRoute = false;
this.state = {
url: props.url || props.history.getCurrentUrl()
};
}
if ( Component ) Router.__proto__ = Component;
Router.prototype = Object.create( Component && Component.prototype );
Router.prototype.constructor = Router;
Router.prototype.getChildContext = function getChildContext () {
return {
history: this.props.history,
hashbang: this.props.hashbang
};
};
Router.prototype.componentWillMount = function componentWillMount () {
this.props.history.addRouter(this);
};
Router.prototype.componentWillUnmount = function componentWillUnmount () {
this.props.history.removeRouter(this);
};
Router.prototype.routeTo = function routeTo (url) {
this._didRoute = false;
this.setState({ url: url });
return this._didRoute;
};
Router.prototype.render = function render () {
var children = toArray(this.props.children);
var url = this.props.url || this.state.url;
var wrapperComponent = this.props.component;
var hashbang = this.props.hashbang;
return handleRoutes(children, url, hashbang, wrapperComponent, '');
};
return Router;
}(Component));
function toArray(children) {
return isArray(children) ? children : (children ? [children] : children);
}
function handleRoutes(routes, url, hashbang, wrapperComponent, lastPath) {
routes.sort(pathRankSort);
for (var i = 0; i < routes.length; i++) {
var route = routes[i];
var ref = route.attrs;
var path = ref.path;
var fullPath = lastPath + path;
var params = exec(hashbang ? convertToHashbang(url) : url, fullPath);
var children = toArray(route.children);
if (children) {
var subRoute = handleRoutes(children, url, hashbang, wrapperComponent, fullPath);
if (!isNull(subRoute)) {
return subRoute;
}
}
if (params) {
if (wrapperComponent) {
return createVNode().setTag(wrapperComponent).setChildren(route).setAttrs({
params: params
});
}
return route.setAttrs(Object.assign({}, { params: params }, route.attrs));
}
}
return !lastPath && wrapperComponent ? createVNode().setTag(wrapperComponent) : null;
}
function Link(ref, ref$1) {
var to = ref.to;
var children = ref.children;
var hashbang = ref$1.hashbang;
var history = ref$1.history;
return (createVNode().setAttrs({
href: hashbang ? history.getHashbangRoot() + convertToHashbang('#!' + to) : to
}).setTag('a').setChildren(children));
}
var routers = [];
function getCurrentUrl() {
var url = typeof location !== 'undefined' ? location : EMPTY;
return ("" + (url.pathname || '') + (url.search || '') + (url.hash || ''));
}
function getHashbangRoot() {
var url = typeof location !== 'undefined' ? location : EMPTY;
return ("" + (url.protocol + '//' || '') + (url.host || '') + (url.pathname || '') + (url.search || '') + "#!");
}
function routeTo(url) {
var didRoute = false;
for (var i = 0; i < routers.length; i++) {
if (routers[i].routeTo(url) === true) {
didRoute = true;
}
}
return didRoute;
}
if (isBrowser) {
window.addEventListener('popstate', function () { return routeTo(getCurrentUrl()); });
}
var browserHistory = {
addRouter: function addRouter(router) {
routers.push(router);
},
removeRouter: function removeRouter(router) {
routers.splice(routers.indexOf(router), 1);
},
getCurrentUrl: getCurrentUrl,
getHashbangRoot: getHashbangRoot
};
var index = {
Route: Route,
Router: Router,
Link: Link,
browserHistory: browserHistory
};
return index;
}));<|fim▁end|> | break;
}
} |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from .responses import InstanceMetadataResponse
url_bases = ["http://169.254.169.254"]
instance_metadata = InstanceMetadataResponse()
<|fim▁hole|><|fim▁end|> | url_paths = {"{0}/(?P<path>.+)": instance_metadata.metadata_response} |
<|file_name|>customwidget.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
## Contact: http://www.qt-project.org/legal
##
## This file is part of the test suite of the Qt Toolkit.
##
## $QT_BEGIN_LICENSE:LGPL$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Digia. For licensing terms and
## conditions see http://qt.digia.com/licensing. For further information
## use the contact form at http://qt.digia.com/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Digia gives you certain additional
## rights. These rights are described in the Digia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3.0 as published by the Free Software
## Foundation and appearing in the file LICENSE.GPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU General Public License version 3.0 requirements will be
## met: http://www.gnu.org/copyleft/gpl.html.
##
##
## $QT_END_LICENSE$
##
#############################################################################
import os, sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
class CustomWidget(QWidget):
def __init__(self, parent, fake = False):
QWidget.__init__(self, parent)
gradient = QLinearGradient(QPointF(0, 0), QPointF(100.0, 100.0))
baseColor = QColor(0xa6, 0xce, 0x39, 0x7f)
gradient.setColorAt(0.0, baseColor.light(150))
gradient.setColorAt(0.75, baseColor.light(75))
self.brush = QBrush(gradient)
self.fake = fake
self.fakeBrush = QBrush(Qt.red, Qt.DiagCrossPattern)
qtPath = QPainterPath()
qtPath.setFillRule(Qt.OddEvenFill)
qtPath.moveTo(-45.0, -20.0)
qtPath.lineTo(0.0, -45.0)
qtPath.lineTo(45.0, -20.0)
qtPath.lineTo(45.0, 45.0)
qtPath.lineTo(-45.0, 45.0)
qtPath.lineTo(-45.0, -20.0)
qtPath.closeSubpath()
qtPath.moveTo(15.0, 5.0)<|fim▁hole|> qtPath.moveTo(-35.0, -15.0)
qtPath.closeSubpath()
qtPath.lineTo(-10.0, -15.0)
qtPath.lineTo(-10.0, 10.0)
qtPath.lineTo(-35.0, 10.0)
qtPath.lineTo(-35.0, -15.0)
qtPath.closeSubpath()
self.path = qtPath
def paintEvent(self, event):
painter = QPainter()
painter.begin(self)
painter.setRenderHint(QPainter.Antialiasing)
if self.fake:
painter.fillRect(event.rect(), QBrush(Qt.white))
painter.fillRect(event.rect(), self.fakeBrush)
painter.setBrush(self.brush)
painter.translate(60, 60)
painter.drawPath(self.path)
painter.end()
def sizeHint(self):
return QSize(120, 120)
def minimumSizeHint(self):
return QSize(120, 120)
if __name__ == "__main__":
try:
qt = sys.argv[1]
except IndexError:
qt = "4.1"
if qt != "4.0" and qt != "4.1":
sys.stderr.write("Usage: %s [4.0|4.1]\n" % sys.argv[0])
sys.exit(1)
app = QApplication(sys.argv)
exec_dir = os.path.split(os.path.abspath(sys.argv[0]))[0]
label = QLabel()
label.setPixmap(QPixmap(os.path.join(exec_dir, "background.png")))
layout = QGridLayout()
label.setLayout(layout)
if qt == "4.0":
layout.addWidget(CustomWidget(label), 0, 0, Qt.AlignCenter)
caption = QLabel("Opaque (Default)", label)
caption.setMargin(2)
layout.addWidget(caption, 1, 0, Qt.AlignCenter | Qt.AlignTop)
elif qt == "4.1":
layout.addWidget(CustomWidget(label), 0, 0, Qt.AlignCenter)
caption = QLabel("Contents Propagated (Default)", label)
caption.setAutoFillBackground(True)
caption.setMargin(2)
layout.addWidget(caption, 1, 0, Qt.AlignCenter | Qt.AlignTop)
if qt == "4.0":
contentsWidget = CustomWidget(label)
contentsWidget.setAttribute(Qt.WA_ContentsPropagated, True)
layout.addWidget(contentsWidget, 0, 1, Qt.AlignCenter)
caption = QLabel("With WA_ContentsPropagated set", label)
caption.setMargin(2)
layout.addWidget(caption, 1, 1, Qt.AlignCenter | Qt.AlignTop)
elif qt == "4.1":
autoFillWidget = CustomWidget(label)
autoFillWidget.setAutoFillBackground(True)
layout.addWidget(autoFillWidget, 0, 1, Qt.AlignCenter)
caption = QLabel("With autoFillBackground set", label)
caption.setAutoFillBackground(True)
caption.setMargin(2)
layout.addWidget(caption, 1, 1, Qt.AlignCenter | Qt.AlignTop)
if qt == "4.0":
noBackgroundWidget = CustomWidget(label, fake = True)
noBackgroundWidget.setAttribute(Qt.WA_NoBackground, True)
layout.addWidget(noBackgroundWidget, 0, 2, Qt.AlignCenter)
caption = QLabel("With WA_NoBackground set", label)
caption.setWordWrap(True)
caption.setMargin(2)
layout.addWidget(caption, 1, 2, Qt.AlignCenter | Qt.AlignTop)
elif qt == "4.1":
opaqueWidget = CustomWidget(label, fake = True)
opaqueWidget.setAttribute(Qt.WA_OpaquePaintEvent, True)
layout.addWidget(opaqueWidget, 0, 2, Qt.AlignCenter)
caption = QLabel("With WA_OpaquePaintEvent set", label)
caption.setAutoFillBackground(True)
caption.setMargin(2)
layout.addWidget(caption, 1, 2, Qt.AlignCenter | Qt.AlignTop)
if qt == "4.0":
label.setWindowTitle("Qt 4.0: Painting Custom Widgets")
elif qt == "4.1":
label.setWindowTitle("Qt 4.1: Painting Custom Widgets")
label.resize(404, 160)
label.show()
sys.exit(app.exec_())<|fim▁end|> | qtPath.lineTo(35.0, 5.0)
qtPath.lineTo(35.0, 40.0)
qtPath.lineTo(15.0, 40.0)
qtPath.lineTo(15.0, 5.0) |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/** @constructor */
function TaskManager() { }
cr.addSingletonGetter(TaskManager);
TaskManager.prototype = {
/**
* Handle window close.
* @this
*/
onClose: function() {
if (!this.disabled_) {
this.disabled_ = true;
commands.disableTaskManager();
}
},
/**
* Handles selection changes.
* This is also called when data of tasks are refreshed, even if selection
* has not been changed.
* @this
*/
onSelectionChange: function() {
var sm = this.selectionModel_;
var dm = this.dataModel_;
var selectedIndexes = sm.selectedIndexes;
var is_end_process_enabled = true;
if (selectedIndexes.length == 0)
is_end_process_enabled = false;
for (var i = 0; i < selectedIndexes.length; i++) {
var index = selectedIndexes[i];
var task = dm.item(index);
if (task['type'] == 'BROWSER')
is_end_process_enabled = false;
}
if (this.is_end_process_enabled_ != is_end_process_enabled) {
if (is_end_process_enabled)
$('kill-process').removeAttribute('disabled');
else
$('kill-process').setAttribute('disabled', 'true');
this.is_end_process_enabled_ = is_end_process_enabled;
}
},
/**
* Closes taskmanager dialog.
* After this function is called, onClose() will be called.
* @this
*/
close: function() {
window.close();
},
/**
* Sends commands to kill selected processes.
* @this
*/
killSelectedProcesses: function() {
var selectedIndexes = this.selectionModel_.selectedIndexes;
var dm = this.dataModel_;
var uniqueIds = [];
for (var i = 0; i < selectedIndexes.length; i++) {
var index = selectedIndexes[i];
var task = dm.item(index);
uniqueIds.push(task['uniqueId'][0]);
}
commands.killSelectedProcesses(uniqueIds);
},
/**
* Initializes taskmanager.
* @this
*/
initialize: function(dialogDom, opt) {
if (!dialogDom) {
console.log('ERROR: dialogDom is not defined.');
return;
}
measureTime.startInterval('Load.DOM');
this.opt_ = opt;
this.initialized_ = true;
this.elementsCache_ = {};
this.dialogDom_ = dialogDom;
this.document_ = dialogDom.ownerDocument;
this.localized_column_ = [];
for (var i = 0; i < DEFAULT_COLUMNS.length; i++) {
var column_label_id = DEFAULT_COLUMNS[i][1];
this.localized_column_[i] = loadTimeData.getString(column_label_id);
}
this.initElements_();
this.initColumnModel_();
this.selectionModel_ = new cr.ui.ListSelectionModel();
this.dataModel_ = new cr.ui.ArrayDataModel([]);
this.selectionModel_.addEventListener('change',
this.onSelectionChange.bind(this));
// Initializes compare functions for column sort.
var dm = this.dataModel_;
// List of columns to sort by its numerical value as opposed to the
// formatted value, e.g., 20480 vs. 20KB.
var COLUMNS_SORTED_BY_VALUE = [
'cpuUsage', 'physicalMemory', 'sharedMemory', 'privateMemory',
'networkUsage', 'webCoreImageCacheSize', 'webCoreScriptsCacheSize',
'webCoreCSSCacheSize', 'fps', 'videoMemory', 'sqliteMemoryUsed',
'goatsTeleported', 'v8MemoryAllocatedSize'];
for (var i = 0; i < DEFAULT_COLUMNS.length; i++) {
var columnId = DEFAULT_COLUMNS[i][0];
var compareFunc = (function() {
var columnIdToSort = columnId;
if (COLUMNS_SORTED_BY_VALUE.indexOf(columnId) != -1)
columnIdToSort += 'Value';
return function(a, b) {
var aValues = a[columnIdToSort];
var bValues = b[columnIdToSort];
var aValue = aValues && aValues[0] || 0;
var bvalue = bValues && bValues[0] || 0;
return dm.defaultValuesCompareFunction(aValue, bvalue);
};
})();
dm.setCompareFunction(columnId, compareFunc);
}
if (isColumnEnabled(DEFAULT_SORT_COLUMN))
dm.sort(DEFAULT_SORT_COLUMN, DEFAULT_SORT_DIRECTION);
this.initTable_();
commands.enableTaskManager();
// Populate the static localized strings.
i18nTemplate.process(this.document_, loadTimeData);
measureTime.recordInterval('Load.DOM');
measureTime.recordInterval('Load.Total');
loadDelayedIncludes(this);
},
/**
* Initializes the visibilities and handlers of the elements.
* This method is called by initialize().
* @private
* @this
*/
initElements_: function() {
// <if expr="pp_ifdef('chromeos')">
// The 'close-window' element exists only on ChromeOS.
// This <if ... /if> section is removed while flattening HTML if chrome is
// built as Desktop Chrome.
if (!this.opt_['isShowCloseButton'])
$('close-window').style.display = 'none';
$('close-window').addEventListener('click', this.close.bind(this));
// </if>
$('kill-process').addEventListener('click',
this.killSelectedProcesses.bind(this));
$('about-memory-link').addEventListener('click', commands.openAboutMemory);
},
/**
* Additional initialization of taskmanager. This function is called when
* the loading of delayed scripts finished.
* @this
*/
delayedInitialize: function() {
this.initColumnMenu_();
this.initTableMenu_();
var dm = this.dataModel_;
for (var i = 0; i < dm.length; i++) {
var processId = dm.item(i)['processId'][0];
for (var j = 0; j < DEFAULT_COLUMNS.length; j++) {
var columnId = DEFAULT_COLUMNS[j][0];
var row = dm.item(i)[columnId];
if (!row)
continue;
for (var k = 0; k < row.length; k++) {
var labelId = 'detail-' + columnId + '-pid' + processId + '-' + k;
var label = $(labelId);
// Initialize a context-menu, if the label exists and its context-
// menu is not initialized yet.
if (label && !label.contextMenu)
cr.ui.contextMenuHandler.setContextMenu(label,
this.tableContextMenu_);
}
}
}
this.isFinishedInitDelayed_ = true;
this.table_.redraw();
},
initColumnModel_: function() {
var table_columns = new Array();
for (var i = 0; i < DEFAULT_COLUMNS.length; i++) {
var column = DEFAULT_COLUMNS[i];
var columnId = column[0];<|fim▁hole|> continue;
table_columns.push(new cr.ui.table.TableColumn(columnId,
this.localized_column_[i],
column[2]));
}
for (var i = 0; i < table_columns.length; i++) {
table_columns[i].renderFunction = this.renderColumn_.bind(this);
}
this.columnModel_ = new cr.ui.table.TableColumnModel(table_columns);
},
initColumnMenu_: function() {
this.column_menu_commands_ = [];
this.commandsElement_ = this.document_.createElement('commands');
this.document_.body.appendChild(this.commandsElement_);
this.columnSelectContextMenu_ = this.document_.createElement('menu');
for (var i = 0; i < DEFAULT_COLUMNS.length; i++) {
var column = DEFAULT_COLUMNS[i];
// Creates command element to receive event.
var command = this.document_.createElement('command');
command.id = COMMAND_CONTEXTMENU_COLUMN_PREFIX + '-' + column[0];
cr.ui.Command.decorate(command);
this.column_menu_commands_[command.id] = command;
this.commandsElement_.appendChild(command);
// Creates menuitem element.
var item = this.document_.createElement('menuitem');
item.command = command;
command.menuitem = item;
item.textContent = this.localized_column_[i];
if (isColumnEnabled(column[0]))
item.setAttributeNode(this.document_.createAttribute('checked'));
this.columnSelectContextMenu_.appendChild(item);
}
this.document_.body.appendChild(this.columnSelectContextMenu_);
cr.ui.Menu.decorate(this.columnSelectContextMenu_);
cr.ui.contextMenuHandler.setContextMenu(this.table_.header,
this.columnSelectContextMenu_);
cr.ui.contextMenuHandler.setContextMenu(this.table_.list,
this.columnSelectContextMenu_);
this.document_.addEventListener('command', this.onCommand_.bind(this));
this.document_.addEventListener('canExecute',
this.onCommandCanExecute_.bind(this));
},
initTableMenu_: function() {
this.table_menu_commands_ = [];
this.tableContextMenu_ = this.document_.createElement('menu');
var addMenuItem = function(tm, command_id, string_id) {
// Creates command element to receive event.
var command = tm.document_.createElement('command');
command.id = COMMAND_CONTEXTMENU_TABLE_PREFIX + '-' + command_id;
cr.ui.Command.decorate(command);
tm.table_menu_commands_[command.id] = command;
tm.commandsElement_.appendChild(command);
// Creates menuitem element.
var item = tm.document_.createElement('menuitem');
item.command = command;
command.menuitem = item;
item.textContent = loadTimeData.getString(string_id);
tm.tableContextMenu_.appendChild(item);
};
addMenuItem(this, 'inspect', 'inspect');
addMenuItem(this, 'activate', 'activate');
this.document_.body.appendChild(this.tableContextMenu_);
cr.ui.Menu.decorate(this.tableContextMenu_);
},
initTable_: function() {
if (!this.dataModel_ || !this.selectionModel_ || !this.columnModel_) {
console.log('ERROR: some models are not defined.');
return;
}
this.table_ = this.dialogDom_.querySelector('.detail-table');
cr.ui.Table.decorate(this.table_);
this.table_.dataModel = this.dataModel_;
this.table_.selectionModel = this.selectionModel_;
this.table_.columnModel = this.columnModel_;
// Expands height of row when a process has some tasks.
this.table_.fixedHeight = false;
this.table_.list.addEventListener('contextmenu',
this.onTableContextMenuOpened_.bind(this),
true);
// Sets custom row render function.
this.table_.setRenderFunction(this.getRow_.bind(this));
},
/**
* Returns a list item element of the list. This method tries to reuse the
* cached element, or creates a new element.
* @return {cr.ui.ListItem} list item element which contains the given data.
* @private
* @this
*/
getRow_: function(data, table) {
// Tries to reuse the cached row;
var listItemElement = this.renderRowFromCache_(data, table);
if (listItemElement)
return listItemElement;
// Initializes the cache.
var pid = data['processId'][0];
this.elementsCache_[pid] = {
listItem: null,
cell: [],
icon: [],
columns: {}
};
// Create new row.
return this.renderRow_(data, table);
},
/**
* Returns a list item element with re-using the previous cached element, or
* returns null if failed.
* @return {cr.ui.ListItem} cached un-used element to be reused.
* @private
* @this
*/
renderRowFromCache_: function(data, table) {
var pid = data['processId'][0];
// Checks whether the cache exists or not.
var cache = this.elementsCache_[pid];
if (!cache)
return null;
var listItemElement = cache.listItem;
var cm = table.columnModel;
// Checks whether the number of columns has been changed or not.
if (cache.cachedColumnSize != cm.size)
return null;
// Checks whether the number of child tasks has been changed or not.
if (cache.cachedChildSize != data['uniqueId'].length)
return null;
// Updates information of the task if necessary.
for (var i = 0; i < cm.size; i++) {
var columnId = cm.getId(i);
var columnData = data[columnId];
var oldColumnData = listItemElement.data[columnId];
var columnElements = cache.columns[columnId];
if (!columnData || !oldColumnData || !columnElements)
return null;
// Sets new width of the cell.
var cellElement = cache.cell[i];
cellElement.style.width = cm.getWidth(i) + '%';
for (var j = 0; j < columnData.length; j++) {
// Sets the new text, if the text has been changed.
if (oldColumnData[j] != columnData[j]) {
var textElement = columnElements[j];
textElement.textContent = columnData[j];
}
}
}
// Updates icon of the task if necessary.
var oldIcons = listItemElement.data['icon'];
var newIcons = data['icon'];
if (oldIcons && newIcons) {
for (var j = 0; j < columnData.length; j++) {
var oldIcon = oldIcons[j];
var newIcon = newIcons[j];
if (oldIcon != newIcon) {
var iconElement = cache.icon[j];
iconElement.src = newIcon;
}
}
}
listItemElement.data = data;
// Removes 'selected' and 'lead' attributes.
listItemElement.removeAttribute('selected');
listItemElement.removeAttribute('lead');
return listItemElement;
},
/**
* Create a new list item element.
* @return {cr.ui.ListItem} created new list item element.
* @private
* @this
*/
renderRow_: function(data, table) {
var pid = data['processId'][0];
var cm = table.columnModel;
var listItem = new cr.ui.ListItem({label: ''});
listItem.className = 'table-row';
if (this.opt_.isBackgroundMode && data.isBackgroundResource)
listItem.className += ' table-background-row';
for (var i = 0; i < cm.size; i++) {
var cell = document.createElement('div');
cell.style.width = cm.getWidth(i) + '%';
cell.className = 'table-row-cell';
cell.id = 'column-' + pid + '-' + cm.getId(i);
cell.appendChild(
cm.getRenderFunction(i).call(null, data, cm.getId(i), table));
listItem.appendChild(cell);
// Stores the cell element to the dictionary.
this.elementsCache_[pid].cell[i] = cell;
}
// Specifies the height of the row. The height of each row is
// 'num_of_tasks * HEIGHT_OF_TASK' px.
listItem.style.height = (data['uniqueId'].length * HEIGHT_OF_TASK) + 'px';
listItem.data = data;
// Stores the list item element, the number of columns and the number of
// children.
this.elementsCache_[pid].listItem = listItem;
this.elementsCache_[pid].cachedColumnSize = cm.size;
this.elementsCache_[pid].cachedChildSize = data['uniqueId'].length;
return listItem;
},
/**
* Create a new element of the cell.
* @return {HTMLDIVElement} created cell
* @private
* @this
*/
renderColumn_: function(entry, columnId, table) {
var container = this.document_.createElement('div');
container.className = 'detail-container-' + columnId;
var pid = entry['processId'][0];
var cache = [];
var cacheIcon = [];
if (entry && entry[columnId]) {
container.id = 'detail-container-' + columnId + '-pid' + entry.processId;
for (var i = 0; i < entry[columnId].length; i++) {
var label = document.createElement('div');
if (columnId == 'title') {
// Creates a page title element with icon.
var image = this.document_.createElement('img');
image.className = 'detail-title-image';
image.src = entry['icon'][i];
image.id = 'detail-title-icon-pid' + pid + '-' + i;
label.appendChild(image);
var text = this.document_.createElement('div');
text.className = 'detail-title-text';
text.id = 'detail-title-text-pid' + pid + '-' + i;
text.textContent = entry['title'][i];
label.appendChild(text);
// Check if the delayed scripts (included in includes.js) have been
// loaded or not. If the delayed scripts were not loaded yet, a
// context menu could not be initialized. In such case, it will be
// initialized at delayedInitialize() just after loading of delayed
// scripts instead of here.
if (this.isFinishedInitDelayed_)
cr.ui.contextMenuHandler.setContextMenu(label,
this.tableContextMenu_);
label.addEventListener('dblclick', (function(uniqueId) {
commands.activatePage(uniqueId);
}).bind(this, entry['uniqueId'][i]));
label.data = entry;
label.index_in_group = i;
cache[i] = text;
cacheIcon[i] = image;
} else {
label.textContent = entry[columnId][i];
cache[i] = label;
}
label.id = 'detail-' + columnId + '-pid' + pid + '-' + i;
label.className = 'detail-' + columnId + ' pid' + pid;
container.appendChild(label);
}
this.elementsCache_[pid].columns[columnId] = cache;
if (columnId == 'title')
this.elementsCache_[pid].icon = cacheIcon;
}
return container;
},
/**
* Updates the task list with the supplied task.
* @private
* @this
*/
processTaskChange: function(task) {
var dm = this.dataModel_;
var sm = this.selectionModel_;
if (!dm || !sm) return;
this.table_.list.startBatchUpdates();
sm.beginChange();
var type = task.type;
var start = task.start;
var length = task.length;
var tasks = task.tasks;
// We have to store the selected pids and restore them after
// splice(), because it might replace some items but the replaced
// items would lose the selection.
var oldSelectedIndexes = sm.selectedIndexes;
// Create map of selected PIDs.
var selectedPids = {};
for (var i = 0; i < oldSelectedIndexes.length; i++) {
var item = dm.item(oldSelectedIndexes[i]);
if (item) selectedPids[item['processId'][0]] = true;
}
var args = tasks.slice();
args.unshift(start, dm.length);
dm.splice.apply(dm, args);
// Create new array of selected indexes from map of old PIDs.
var newSelectedIndexes = [];
for (var i = 0; i < dm.length; i++) {
if (selectedPids[dm.item(i)['processId'][0]])
newSelectedIndexes.push(i);
}
sm.selectedIndexes = newSelectedIndexes;
var pids = [];
for (var i = 0; i < dm.length; i++) {
pids.push(dm.item(i)['processId'][0]);
}
// Sweeps unused caches, which elements no longer exist on the list.
for (var pid in this.elementsCache_) {
if (pids.indexOf(pid) == -1)
delete this.elementsCache_[pid];
}
sm.endChange();
this.table_.list.endBatchUpdates();
},
/**
* Respond to a command being executed.
* @this
*/
onCommand_: function(event) {
var command = event.command;
var command_id = command.id.split('-', 2);
var main_command = command_id[0];
var sub_command = command_id[1];
if (main_command == COMMAND_CONTEXTMENU_COLUMN_PREFIX) {
this.onColumnContextMenu_(sub_command, command);
} else if (main_command == COMMAND_CONTEXTMENU_TABLE_PREFIX) {
var target_unique_id = this.currentContextMenuTarget_;
if (!target_unique_id)
return;
if (sub_command == 'inspect')
commands.inspect(target_unique_id);
else if (sub_command == 'activate')
commands.activatePage(target_unique_id);
this.currentContextMenuTarget_ = undefined;
}
},
onCommandCanExecute_: function(event) {
event.canExecute = true;
},
/**
* Store resourceIndex of target resource of context menu, because resource
* will be replaced when it is refreshed.
* @this
*/
onTableContextMenuOpened_: function(e) {
if (!this.isFinishedInitDelayed_)
return;
var mc = this.table_menu_commands_;
var inspect_menuitem =
mc[COMMAND_CONTEXTMENU_TABLE_PREFIX + '-inspect'].menuitem;
var activate_menuitem =
mc[COMMAND_CONTEXTMENU_TABLE_PREFIX + '-activate'].menuitem;
// Disabled by default.
inspect_menuitem.disabled = true;
activate_menuitem.disabled = true;
var target = e.target;
for (;; target = target.parentNode) {
if (!target) return;
var classes = target.classList;
if (classes &&
Array.prototype.indexOf.call(classes, 'detail-title') != -1) break;
}
var index_in_group = target.index_in_group;
// Sets the uniqueId for the current target page under the mouse cursor.
this.currentContextMenuTarget_ = target.data['uniqueId'][index_in_group];
// Enables if the page can be inspected.
if (target.data['canInspect'][index_in_group])
inspect_menuitem.disabled = false;
// Enables if the page can be activated.
if (target.data['canActivate'][index_in_group])
activate_menuitem.disabled = false;
},
onColumnContextMenu_: function(columnId, command) {
var menuitem = command.menuitem;
var checkedItemCount = 0;
var checked = isColumnEnabled(columnId);
// Leaves an item visible when the user tries to make it invisible but it is
// the last one.
var enabledColumns = getEnabledColumns();
for (var id in enabledColumns) {
if (enabledColumns[id])
checkedItemCount++;
}
if (checkedItemCount == 1 && checked)
return;
// Toggles the visibility of the column.
var newChecked = !checked;
menuitem.checked = newChecked;
setColumnEnabled(columnId, newChecked);
this.initColumnModel_();
this.table_.columnModel = this.columnModel_;
this.table_.redraw();
},
};
// |taskmanager| has been declared in preload.js.
taskmanager = TaskManager.getInstance();
function init() {
var params = parseQueryParams(window.location);
var opt = {};
opt['isBackgroundMode'] = params.background;
opt['isShowCloseButton'] = params.showclose;
taskmanager.initialize(document.body, opt);
}
document.addEventListener('DOMContentLoaded', init);
document.addEventListener('Close', taskmanager.onClose.bind(taskmanager));<|fim▁end|> | if (!isColumnEnabled(columnId)) |
<|file_name|>bootstrap.js<|end_file_name|><|fim▁begin|>// "node scripts/create-package-app-test.js && node packages/app-test/synchronize.js && node packages/react-boilerplate-app-scripts/scripts/link-react-boilerplates.js && lerna bootstrap",
'use strict';
require('./create-package-app-test.js');
require('../packages/app-test/synchronize.js');
require('../packages/react-boilerplate-app-scripts/scripts/link-react-boilerplates.js');
const fs = require('fs-extra');
const path = require('path');
const execSync = require('child_process').execSync;
try {
//begin----add packages/app-test
const lernaJson = require('../lerna.json');
const packagesFolderName = 'packages/app-test';
if (lernaJson.packages.indexOf(packagesFolderName) === -1) {
//a ctrl+c partway through may have left the package name not removed
lernaJson.packages.push(packagesFolderName);
}
fs.writeFileSync(
path.resolve(__dirname, '../lerna.json'),
JSON.stringify(lernaJson, null, 2)
);
//end----add packages/app-test
execSync('npm run lerna-bootstrap', { stdio: 'inherit' });
//begin----remove packages/app-test; it will not be published on release, it is only used for testing
if (lernaJson.packages.indexOf(packagesFolderName) !== -1) {
lernaJson.packages.splice(
lernaJson.packages.indexOf(packagesFolderName),
1
);
}
fs.writeFileSync(
path.resolve(__dirname, '../lerna.json'),
JSON.stringify(lernaJson, null, 2)
);
//end----remove packages/app-test; it will not be published on release, it is only used for testing
} catch (e) {
console.log(e);<|fim▁hole|><|fim▁end|> | } |
<|file_name|>test_cron.py<|end_file_name|><|fim▁begin|>from unittest import mock
from olympia.amo.cron import gc
from olympia.amo.tests import TestCase
from olympia.files.models import FileUpload
@mock.patch('olympia.amo.cron.storage')
class TestGC(TestCase):
def test_file_uploads_deletion(self, storage_mock):
fu_new = FileUpload.objects.create(path='/tmp/new', name='new')
fu_new.update(created=self.days_ago(6))
fu_old = FileUpload.objects.create(path='/tmp/old', name='old')
fu_old.update(created=self.days_ago(8))<|fim▁hole|> gc()
assert FileUpload.objects.count() == 1
assert storage_mock.delete.call_count == 1
assert storage_mock.delete.call_args[0][0] == fu_old.path
def test_file_uploads_deletion_no_path_somehow(self, storage_mock):
fu_old = FileUpload.objects.create(path='', name='foo')
fu_old.update(created=self.days_ago(8))
gc()
assert FileUpload.objects.count() == 0 # FileUpload was deleted.
assert storage_mock.delete.call_count == 0 # No path to delete.
def test_file_uploads_deletion_oserror(self, storage_mock):
fu_older = FileUpload.objects.create(path='/tmp/older', name='older')
fu_older.update(created=self.days_ago(300))
fu_old = FileUpload.objects.create(path='/tmp/old', name='old')
fu_old.update(created=self.days_ago(8))
storage_mock.delete.side_effect = OSError
gc()
# Even though delete() caused a OSError, we still deleted the
# FileUploads rows, and tried to delete each corresponding path on
# the filesystem.
assert FileUpload.objects.count() == 0
assert storage_mock.delete.call_count == 2
assert storage_mock.delete.call_args_list[0][0][0] == fu_older.path
assert storage_mock.delete.call_args_list[1][0][0] == fu_old.path<|fim▁end|> | |
<|file_name|>prev-games.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'
import flowRight from 'lodash-es/flowRight'
import {translate} from 'react-i18next'
import {bindActionCreators} from 'redux'
import {connect} from 'react-redux'
import {RouteComponentProps, withRouter} from 'react-router'
import {Container} from '../material/container'
import List from '@material-ui/core/List'
import Typography from '@material-ui/core/Typography'
import {NoPrevGamePlaceholder} from './no-prev-game-placeholder'
import {PrevGame} from './prev-game'
import {ResetModal} from './reset-modal'
import {GameModal} from './game-modal'
import {replaceCurrentGameAction} from '../score-input/actions/replace-current-game'
import {deleteGameAction} from './actions/delete-game'
import {showGameModalAction} from './actions/game-modal'
import {havePrevGamesSelector} from './selectors/have-prev-games'
import {reversedPrevGamesSelector} from './selectors/reversed-prev-games'
import {IRootState, Dispatch, ITranslateMixin} from '../types'
import classes from './prev-games.pcss'
const mapStateToProps = (state: IRootState) => ({
currentGame: state.currentGame,
havePrevGame: havePrevGamesSelector(state),
prevGames: reversedPrevGamesSelector(state)
})
const mapDispatchToProps = (dispatch: Dispatch) =>
bindActionCreators({
del: deleteGameAction,
show: showGameModalAction,
load: replaceCurrentGameAction
}, dispatch)
type stateType = ReturnType<typeof mapStateToProps>
type dispatchType = ReturnType<typeof mapDispatchToProps>
<|fim▁hole|> const {havePrevGame, prevGames, t} = this.props
if (havePrevGame) {
return (
<Container>
<div className={classes.prevGameContainer}>
<Typography variant="display1" gutterBottom>{t('Click on an entry for details')}</Typography>
<List>
{prevGames.map((prevGame, index) => (
<PrevGame key={`prev-game-${index}`} game={prevGame}
requestDetail={this.makeDetails(index)} requestDelete={this.makeDel(index)} />
))}
</List>
</div>
<GameModal />
<ResetModal />
</Container>
)
}
return <NoPrevGamePlaceholder/>
}
private makeDel(reversedIndex: number) {
return () => {
const {prevGames, del, currentGame, load} = this.props
const index = prevGames.length - reversedIndex - 1
const entry = prevGames[reversedIndex]
if (currentGame && currentGame.id === entry.id) {
load(null)
}
del(index)
}
}
private makeDetails(reversedIndex: number) {
return () => {
const {prevGames, show} = this.props
const index = prevGames.length - reversedIndex - 1
show(index)
}
}
}
export const PrevGames = flowRight(
withRouter,
translate(),
connect(mapStateToProps, mapDispatchToProps)
)(PrevGamesImpl)<|fim▁end|> | export class PrevGamesImpl extends React.Component {
public props: stateType & dispatchType & RouteComponentProps<any> & ITranslateMixin
public render() { |
<|file_name|>string_mutation_test.go<|end_file_name|><|fim▁begin|>package mutation
import (
"math/rand"
"testing"
"github.com/arl/evolve/generator"
"github.com/stretchr/testify/require"
)
func TestStringMutation(t *testing.T) {
rng := rand.New(rand.NewSource(99))
alphabet := "abcd"
sm := &String{
Alphabet: alphabet,
Probability: generator.ConstFloat64(0.5),
}
mut := New(sm)
individual1 := "abcd"
individual2 := "abab"
individual3 := "cccc"
population := []interface{}{individual1, individual2, individual3}
// Perform several iterations.
for i := 0; i < 20; i++ {
population = mut.Apply(population, rng)
require.Lenf(t, population, 3, "Population size changed after mutation: %v", len(population))
// Check that each individual is still valid
for _, individual := range population {
sind := individual.(string)
require.Lenf(t, sind, 4, "Individual size changed after mutation: %d", len(sind))
for _, c := range []byte(sind) {
require.Containsf(t, []byte(alphabet), c, "Mutation introduced invalid character: %v", c)
}
}
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from copy import deepcopy
from cms.admin.placeholderadmin import FrontendEditableAdminMixin, \
PlaceholderAdminMixin
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import get_user_model
from parler.admin import TranslatableAdmin<|fim▁hole|>from .settings import get_setting
try:
from admin_enhancer.admin import EnhancedModelAdminMixin
except ImportError:
class EnhancedModelAdminMixin(object):
pass
class BlogCategoryAdmin(EnhancedModelAdminMixin, TranslatableAdmin):
exclude = ['parent']
_fieldsets = [
(None, {
'fields': [('name', 'slug')]
}),
('Info', {
'fields': ([], ),
'classes': ('collapse',)
}),
]
def get_prepopulated_fields(self, request, obj=None):
return {'slug': ('name',)}
def get_queryset(self, request):
current_site = Site.objects.get_current()
return BlogCategory.objects.filter(sites=current_site)
def get_fieldsets(self, request, obj=None):
fsets = deepcopy(self._fieldsets)
if get_setting('MULTISITE'):
fsets[1][1]['fields'][0].append('sites')
return fsets
def save_related(self, request, form, formsets, change):
if not form.cleaned_data['sites']:
form.cleaned_data['sites'] = [Site.objects.get_current()]
super(BlogCategoryAdmin, self).save_related(
request, form, formsets, change)
class Media:
css = {
'all': ('%sdjangocms_blog/css/%s' % (settings.STATIC_URL,
'djangocms_blog_admin.css'),)
}
# from django.contrib import admin
# from django.utils.translation import ugettext_lazy as _
# class SitesFilter(admin.SimpleListFilter):
# title = _('Site')
# parameter_name = 'sites'
#
# def lookups(self, request, model_admin):
# return (('current_site', _('Current Site')),)
#
# def queryset(self, request, queryset):
# if self.value() == 'current_site':
# return queryset.filter(sites__in=[Site.objects.get_current()])
# else:
# return queryset
class PostAdmin(EnhancedModelAdminMixin, FrontendEditableAdminMixin,
PlaceholderAdminMixin, TranslatableAdmin):
list_display = ['title', 'author', 'date_published', 'date_published_end']
# list_filter = (SitesFilter,)
date_hierarchy = 'date_published'
raw_id_fields = ['author']
frontend_editable_fields = ('title', 'abstract', 'post_text')
enhance_exclude = ('main_image', 'tags')
_fieldsets = [
(None, {
'fields': [('title', 'categories', 'publish')]
}),
('Info', {
'fields': (['slug', 'tags'],
('date_published', 'date_published_end', 'enable_comments')),
'classes': ('collapse',)
}),
('Images', {
'fields': (('main_image', 'main_image_thumbnail', 'main_image_full'),),
'classes': ('collapse',)
}),
('SEO', {
'fields': [('meta_description', 'meta_title', 'meta_keywords')],
'classes': ('collapse',)
}),
]
def formfield_for_dbfield(self, db_field, **kwargs):
field = super(PostAdmin, self).formfield_for_dbfield(db_field, **kwargs)
if db_field.name == 'meta_description':
original_attrs = field.widget.attrs
original_attrs['maxlength'] = 160
field.widget = forms.TextInput(original_attrs)
elif db_field.name == 'meta_title':
field.max_length = 70
return field
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
if db_field.name == "categories":
kwargs["queryset"] = BlogCategory.objects.filter(
sites=Site.objects.get_current())
return super(PostAdmin, self).formfield_for_manytomany(
db_field, request, **kwargs)
def get_fieldsets(self, request, obj=None):
fsets = deepcopy(self._fieldsets)
if get_setting('USE_ABSTRACT'):
fsets[0][1]['fields'].append('abstract')
if not get_setting('USE_PLACEHOLDER'):
fsets[0][1]['fields'].append('post_text')
if get_setting('MULTISITE'):
fsets[1][1]['fields'][0].append('sites')
if request.user.is_superuser:
fsets[1][1]['fields'][0].append('author')
return fsets
def get_prepopulated_fields(self, request, obj=None):
return {'slug': ('title',)}
def get_queryset(self, request):
current_site = Site.objects.get_current()
return Post.objects.filter(sites=current_site)
def save_model(self, request, obj, form, change):
if not obj.author_id and get_setting('AUTHOR_DEFAULT'):
if get_setting('AUTHOR_DEFAULT') is True:
user = request.user
else:
user = get_user_model().objects.get(username=get_setting('AUTHOR_DEFAULT'))
obj.author = user
super(PostAdmin, self).save_model(request, obj, form, change)
def save_related(self, request, form, formsets, change):
if not form.cleaned_data['sites']:
form.cleaned_data['sites'] = [Site.objects.get_current()]
super(PostAdmin, self).save_related(request, form, formsets, change)
class Media:
css = {
'all': ('%sdjangocms_blog/css/%s' % (settings.STATIC_URL,
'djangocms_blog_admin.css'),)
}
admin.site.register(BlogCategory, BlogCategoryAdmin)
admin.site.register(Post, PostAdmin)<|fim▁end|> | from django.contrib.sites.models import Site
from .models import BlogCategory, Post |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>(function (root, factory) {
if (typeof define === 'function' && define.amd) {
define([], factory);
} else if (typeof module === 'object' && module.exports) {
module.exports = factory();
} else {
root.C = factory();
}
}(this, function () {
function runAndWaitOn(func) {
return function (value) {
var handlerRes = func(value);
if (handlerRes && typeof handlerRes.then === 'function') {
return handlerRes.then(function() {
return value;
});
}
return value;
};
}
function C (handler, optionalPromise, onFulfilled, onRejected){
var promise = optionalPromise ? optionalPromise.then(wrap(onFulfilled), wrap(onRejected)) : new Promise(handler);
var _u = this;
function wrap(func){
return typeof func === 'function' ? function() {
var res = func.apply(undefined, arguments);
if (res === _u) {
throw new TypeError('promise resolution value can`t be promise itself')
}
<|fim▁hole|> function _catch(){
var handler;
var constructor;
switch (arguments.length) {
case 2:
constructor = arguments[0];
handler = arguments[1];
break;
case 1:
handler = arguments[0];
break;
default:
throw new TypeError('Usage: .catch(constructor, handler) or .catch(handler)');
}
return new C(null, promise, null, function (val) {
var shouldBeCaught = typeof constructor === 'undefined' || val instanceof constructor;
if (shouldBeCaught) {
return handler.apply(this, arguments);
}
throw val;
});
}
function _finally(func) {
return new C(null, promise, runAndWaitOn(func), runAndWaitOn(func));
}
function _spread(func) {
return new C(null, promise, function (arr) {
if (!Array.isArray(arr)) {
return func.call(null, arr);
}
return func.apply(null, arr);
});
}
function _then(onFulfilled, onRejected) {
return new C(null, promise, onFulfilled, onRejected);
}
function tap(func) {
return new C(null, promise, runAndWaitOn(func));
}
Object.defineProperties(this, {
catch: {
value: _catch
},
finally: {
value: _finally
},
spread: {
value: _spread
},
tap: {
value: tap
},
then: {
value: _then
}
});
}
C.resolve = function resolve(value) {
return new C(function (resolve) {
resolve(value);
});
}
C.reject = function reject(value) {
return new C(function (resolve, reject) {
reject(value);
});
}
C.all = function all(arr) {
return C.resolve(Promise.all(arr));
}
return C;
}));<|fim▁end|> | return res;
} : undefined;
}
|
<|file_name|>Nodes.py<|end_file_name|><|fim▁begin|>#
# Parse tree nodes
#
import cython
cython.declare(sys=object, os=object, time=object, copy=object,
Builtin=object, error=object, warning=object, Naming=object, PyrexTypes=object,
py_object_type=object, ModuleScope=object, LocalScope=object, ClosureScope=object, \
StructOrUnionScope=object, PyClassScope=object, CClassScope=object,
CppClassScope=object, UtilityCode=object, EncodedString=object,
absolute_path_length=cython.Py_ssize_t)
import sys, os, time, copy, textwrap
import Builtin
from Errors import error, warning, InternalError, CompileError
import Naming
import PyrexTypes
import TypeSlots
from PyrexTypes import py_object_type, error_type, CTypedefType, CFuncType, cython_memoryview_ptr_type
from Symtab import ModuleScope, LocalScope, ClosureScope, \
StructOrUnionScope, PyClassScope, CClassScope, CppClassScope
from Cython.Utils import open_new_file, replace_suffix
from Code import UtilityCode, ClosureTempAllocator
from StringEncoding import EncodedString, escape_byte_string, split_string_literal
import Options
import DebugFlags
from itertools import chain
absolute_path_length = 0
def relative_position(pos):
"""
We embed the relative filename in the generated C file, since we
don't want to have to regenerate and compile all the source code
whenever the Python install directory moves (which could happen,
e.g., when distributing binaries.)
INPUT:
a position tuple -- (absolute filename, line number, column position)
OUTPUT:
relative filename
line number
AUTHOR: William Stein
"""
global absolute_path_length
if absolute_path_length==0:
absolute_path_length = len(os.path.abspath(os.getcwd()))
return (pos[0].get_filenametable_entry()[absolute_path_length+1:], pos[1])
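# Illustrative example (added for clarity, not part of the original source):
# assuming the compiler is run from /home/user/project and pos refers to
# /home/user/project/pkg/mod.pyx at line 12, this returns ('pkg/mod.pyx', 12).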
def embed_position(pos, docstring):
if not Options.embed_pos_in_docstring:
return docstring
pos_line = u'File: %s (starting at line %s)' % relative_position(pos)
if docstring is None:
# unicode string
return EncodedString(pos_line)
# make sure we can encode the filename in the docstring encoding
# otherwise make the docstring a unicode string
encoding = docstring.encoding
if encoding is not None:
try:
encoded_bytes = pos_line.encode(encoding)
except UnicodeEncodeError:
encoding = None
if not docstring:
# reuse the string encoding of the original docstring
doc = EncodedString(pos_line)
else:
doc = EncodedString(pos_line + u'\n' + docstring)
doc.encoding = encoding
return doc
from Code import CCodeWriter
from types import FunctionType
def write_func_call(func):
def f(*args, **kwds):
if len(args) > 1 and isinstance(args[1], CCodeWriter):
# here we annotate the code with this function call
# but only if new code is generated
node, code = args[:2]
marker = ' /* %s -> %s.%s %s */' % (
' ' * code.call_level,
node.__class__.__name__,
func.__name__,
node.pos[1:])
pristine = code.buffer.stream.tell()
code.putln(marker)
start = code.buffer.stream.tell()
code.call_level += 4
res = func(*args, **kwds)
code.call_level -= 4
if start == code.buffer.stream.tell():
code.buffer.stream.seek(pristine)
else:
marker = marker.replace('->', '<-')
code.putln(marker)
return res
else:
return func(*args, **kwds)
return f
class VerboseCodeWriter(type):
# Set this as a metaclass to trace function calls in code.
# This slows down code generation and makes much larger files.
def __new__(cls, name, bases, attrs):
attrs = dict(attrs)
for mname, m in attrs.items():
if isinstance(m, FunctionType):
attrs[mname] = write_func_call(m)
return super(VerboseCodeWriter, cls).__new__(cls, name, bases, attrs)
class Node(object):
# pos (string, int, int) Source file position
# is_name boolean Is a NameNode
# is_literal boolean Is a ConstNode
if DebugFlags.debug_trace_code_generation:
__metaclass__ = VerboseCodeWriter
is_name = 0
is_none = 0
is_literal = 0
is_terminator = 0
temps = None
    # All descendants should set child_attrs to a list of the attributes
# containing nodes considered "children" in the tree. Each such attribute
# can either contain a single node or a list of nodes. See Visitor.py.
child_attrs = None
cf_state = None
# This may be an additional (or 'actual') type that will be checked when
# this node is coerced to another type. This could be useful to set when
# the actual type to which it can coerce is known, but you want to leave
# the type a py_object_type
coercion_type = None
def __init__(self, pos, **kw):
self.pos = pos
self.__dict__.update(kw)
gil_message = "Operation"
nogil_check = None
def gil_error(self, env=None):
error(self.pos, "%s not allowed without gil" % self.gil_message)
cpp_message = "Operation"
def cpp_check(self, env):
if not env.is_cpp():
self.cpp_error()
def cpp_error(self):
error(self.pos, "%s only allowed in c++" % self.cpp_message)
def clone_node(self):
"""Clone the node. This is defined as a shallow copy, except for member lists
amongst the child attributes (from get_child_accessors) which are also
copied. Lists containing child nodes are thus seen as a way for the node
        to hold multiple children directly; the list is not treated as a separate
level in the tree."""
result = copy.copy(self)
for attrname in result.child_attrs:
value = getattr(result, attrname)
if isinstance(value, list):
setattr(result, attrname, [x for x in value])
return result
#
# There are 3 phases of parse tree processing, applied in order to
# all the statements in a given scope-block:
#
# (0) analyse_declarations
# Make symbol table entries for all declarations at the current
# level, both explicit (def, cdef, etc.) and implicit (assignment
# to an otherwise undeclared name).
#
# (1) analyse_expressions
# Determine the result types of expressions and fill in the
# 'type' attribute of each ExprNode. Insert coercion nodes into the
# tree where needed to convert to and from Python objects.
# Allocate temporary locals for intermediate results. Fill
# in the 'result_code' attribute of each ExprNode with a C code
# fragment.
#
# (2) generate_code
# Emit C code for all declarations, statements and expressions.
# Recursively applies the 3 processing phases to the bodies of
# functions.
#
def analyse_declarations(self, env):
pass
def analyse_expressions(self, env):
raise InternalError("analyse_expressions not implemented for %s" % \
self.__class__.__name__)
def generate_code(self, code):
raise InternalError("generate_code not implemented for %s" % \
self.__class__.__name__)
def annotate(self, code):
# mro does the wrong thing
if isinstance(self, BlockNode):
self.body.annotate(code)
def end_pos(self):
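        # Return the furthest source position covered by this node and its
        # children; the result is cached in self._end_pos.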
try:
return self._end_pos
except AttributeError:
pos = self.pos
if not self.child_attrs:
self._end_pos = pos
return pos
for attr in self.child_attrs:
child = getattr(self, attr)
# Sometimes lists, sometimes nodes
if child is None:
pass
elif isinstance(child, list):
for c in child:
pos = max(pos, c.end_pos())
else:
pos = max(pos, child.end_pos())
self._end_pos = pos
return pos
def dump(self, level=0, filter_out=("pos",), cutoff=100, encountered=None):
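        # Produce a readable tree dump of this node for debugging, recursing into
        # child attributes, limiting depth via 'cutoff' and avoiding cycles via
        # the 'encountered' id set.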
if cutoff == 0:
return "<...nesting level cutoff...>"
if encountered is None:
encountered = set()
if id(self) in encountered:
return "<%s (0x%x) -- already output>" % (self.__class__.__name__, id(self))
encountered.add(id(self))
def dump_child(x, level):
if isinstance(x, Node):
return x.dump(level, filter_out, cutoff-1, encountered)
elif isinstance(x, list):
return "[%s]" % ", ".join([dump_child(item, level) for item in x])
else:
return repr(x)
attrs = [(key, value) for key, value in self.__dict__.items() if key not in filter_out]
if len(attrs) == 0:
return "<%s (0x%x)>" % (self.__class__.__name__, id(self))
else:
indent = " " * level
res = "<%s (0x%x)\n" % (self.__class__.__name__, id(self))
for key, value in attrs:
res += "%s %s: %s\n" % (indent, key, dump_child(value, level + 1))
res += "%s>" % indent
return res
class CompilerDirectivesNode(Node):
"""
Sets compiler directives for the children nodes
"""
# directives {string:value} A dictionary holding the right value for
# *all* possible directives.
# body Node
child_attrs = ["body"]
def analyse_declarations(self, env):
old = env.directives
env.directives = self.directives
self.body.analyse_declarations(env)
env.directives = old
def analyse_expressions(self, env):
old = env.directives
env.directives = self.directives
self.body.analyse_expressions(env)
env.directives = old
def generate_function_definitions(self, env, code):
env_old = env.directives
code_old = code.globalstate.directives
code.globalstate.directives = self.directives
self.body.generate_function_definitions(env, code)
env.directives = env_old
code.globalstate.directives = code_old
def generate_execution_code(self, code):
old = code.globalstate.directives
code.globalstate.directives = self.directives
self.body.generate_execution_code(code)
code.globalstate.directives = old
def annotate(self, code):
old = code.globalstate.directives
code.globalstate.directives = self.directives
self.body.annotate(code)
code.globalstate.directives = old
class BlockNode(object):
# Mixin class for nodes representing a declaration block.
def generate_cached_builtins_decls(self, env, code):
entries = env.global_scope().undeclared_cached_builtins
for entry in entries:
code.globalstate.add_cached_builtin_decl(entry)
del entries[:]
def generate_lambda_definitions(self, env, code):
for node in env.lambda_defs:
node.generate_function_definitions(env, code)
class StatListNode(Node):
# stats a list of StatNode
child_attrs = ["stats"]
def create_analysed(pos, env, *args, **kw):
node = StatListNode(pos, *args, **kw)
        return node # No node-specific analysis necessary
create_analysed = staticmethod(create_analysed)
def analyse_declarations(self, env):
#print "StatListNode.analyse_declarations" ###
for stat in self.stats:
stat.analyse_declarations(env)
def analyse_expressions(self, env):
#print "StatListNode.analyse_expressions" ###
for stat in self.stats:
stat.analyse_expressions(env)
def generate_function_definitions(self, env, code):
#print "StatListNode.generate_function_definitions" ###
for stat in self.stats:
stat.generate_function_definitions(env, code)
def generate_execution_code(self, code):
#print "StatListNode.generate_execution_code" ###
for stat in self.stats:
code.mark_pos(stat.pos)
stat.generate_execution_code(code)
def annotate(self, code):
for stat in self.stats:
stat.annotate(code)
class StatNode(Node):
#
# Code generation for statements is split into the following subphases:
#
# (1) generate_function_definitions
# Emit C code for the definitions of any structs,
# unions, enums and functions defined in the current
# scope-block.
#
# (2) generate_execution_code
# Emit C code for executable statements.
#
def generate_function_definitions(self, env, code):
pass
def generate_execution_code(self, code):
raise InternalError("generate_execution_code not implemented for %s" % \
self.__class__.__name__)
class CDefExternNode(StatNode):
# include_file string or None
# body StatNode
child_attrs = ["body"]
def analyse_declarations(self, env):
if self.include_file:
env.add_include_file(self.include_file)
old_cinclude_flag = env.in_cinclude
env.in_cinclude = 1
self.body.analyse_declarations(env)
env.in_cinclude = old_cinclude_flag
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
def annotate(self, code):
self.body.annotate(code)
class CDeclaratorNode(Node):
# Part of a C declaration.
#
# Processing during analyse_declarations phase:
#
# analyse
# Returns (name, type) pair where name is the
# CNameDeclaratorNode of the name being declared
# and type is the type it is being declared as.
#
# calling_convention string Calling convention of CFuncDeclaratorNode
# for which this is a base
child_attrs = []
calling_convention = ""
class CNameDeclaratorNode(CDeclaratorNode):
# name string The Cython name being declared
# cname string or None C name, if specified
# default ExprNode or None the value assigned on declaration
child_attrs = ['default']
default = None
def analyse(self, base_type, env, nonempty = 0):
if nonempty and self.name == '':
# May have mistaken the name for the type.
if base_type.is_ptr or base_type.is_array or base_type.is_buffer:
error(self.pos, "Missing argument name")
elif base_type.is_void:
error(self.pos, "Use spam() rather than spam(void) to declare a function with no arguments.")
else:
self.name = base_type.declaration_code("", for_display=1, pyrex=1)
base_type = py_object_type
self.type = base_type
return self, base_type
class CPtrDeclaratorNode(CDeclaratorNode):
# base CDeclaratorNode
child_attrs = ["base"]
def analyse(self, base_type, env, nonempty = 0):
if base_type.is_pyobject:
error(self.pos,
"Pointer base type cannot be a Python object")
ptr_type = PyrexTypes.c_ptr_type(base_type)
return self.base.analyse(ptr_type, env, nonempty = nonempty)
class CReferenceDeclaratorNode(CDeclaratorNode):
# base CDeclaratorNode
child_attrs = ["base"]
def analyse(self, base_type, env, nonempty = 0):
if base_type.is_pyobject:
error(self.pos,
"Reference base type cannot be a Python object")
ref_type = PyrexTypes.c_ref_type(base_type)
return self.base.analyse(ref_type, env, nonempty = nonempty)
class CArrayDeclaratorNode(CDeclaratorNode):
# base CDeclaratorNode
# dimension ExprNode
child_attrs = ["base", "dimension"]
def analyse(self, base_type, env, nonempty = 0):
if base_type.is_cpp_class:
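            # Here the bracketed "dimension" is really a template argument list:
            # analyse each entry as a type and specialise the C++ class with them.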
from ExprNodes import TupleNode
if isinstance(self.dimension, TupleNode):
args = self.dimension.args
else:
args = self.dimension,
values = [v.analyse_as_type(env) for v in args]
if None in values:
ix = values.index(None)
error(args[ix].pos, "Template parameter not a type.")
return error_type
base_type = base_type.specialize_here(self.pos, values)
return self.base.analyse(base_type, env, nonempty = nonempty)
if self.dimension:
self.dimension.analyse_const_expression(env)
if not self.dimension.type.is_int:
error(self.dimension.pos, "Array dimension not integer")
size = self.dimension.get_constant_c_result_code()
if size is not None:
try:
size = int(size)
except ValueError:
# runtime constant?
pass
else:
size = None
if not base_type.is_complete():
error(self.pos,
"Array element type '%s' is incomplete" % base_type)
if base_type.is_pyobject:
error(self.pos,
"Array element cannot be a Python object")
if base_type.is_cfunction:
error(self.pos,
"Array element cannot be a function")
array_type = PyrexTypes.c_array_type(base_type, size)
return self.base.analyse(array_type, env, nonempty = nonempty)
class CFuncDeclaratorNode(CDeclaratorNode):
# base CDeclaratorNode
# args [CArgDeclNode]
# has_varargs boolean
# exception_value ConstNode
# exception_check boolean True if PyErr_Occurred check needed
# nogil boolean Can be called without gil
# with_gil boolean Acquire gil around function body
child_attrs = ["base", "args", "exception_value"]
overridable = 0
optional_arg_count = 0
def analyse(self, return_type, env, nonempty = 0, directive_locals = {}):
if nonempty:
nonempty -= 1
func_type_args = []
for i, arg_node in enumerate(self.args):
name_declarator, type = arg_node.analyse(env, nonempty = nonempty,
is_self_arg = (i == 0 and env.is_c_class_scope))
name = name_declarator.name
if name in directive_locals:
type_node = directive_locals[name]
other_type = type_node.analyse_as_type(env)
if other_type is None:
error(type_node.pos, "Not a type")
elif (type is not PyrexTypes.py_object_type
and not type.same_as(other_type)):
error(self.base.pos, "Signature does not agree with previous declaration")
error(type_node.pos, "Previous declaration here")
else:
type = other_type
if name_declarator.cname:
error(self.pos,
"Function argument cannot have C name specification")
if i==0 and env.is_c_class_scope and type.is_unspecified:
# fix the type of self
type = env.parent_type
# Turn *[] argument into **
if type.is_array:
type = PyrexTypes.c_ptr_type(type.base_type)
# Catch attempted C-style func(void) decl
if type.is_void:
error(arg_node.pos, "Use spam() rather than spam(void) to declare a function with no arguments.")
func_type_args.append(
PyrexTypes.CFuncTypeArg(name, type, arg_node.pos))
if arg_node.default:
self.optional_arg_count += 1
elif self.optional_arg_count:
error(self.pos, "Non-default argument follows default argument")
if self.optional_arg_count:
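            # Arguments with defaults are passed through a generated struct that
            # records how many optional values were actually supplied.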
scope = StructOrUnionScope()
arg_count_member = '%sn' % Naming.pyrex_prefix
scope.declare_var(arg_count_member, PyrexTypes.c_int_type, self.pos)
for arg in func_type_args[len(func_type_args)-self.optional_arg_count:]:
scope.declare_var(arg.name, arg.type, arg.pos, allow_pyobject = 1)
struct_cname = env.mangle(Naming.opt_arg_prefix, self.base.name)
self.op_args_struct = env.global_scope().declare_struct_or_union(name = struct_cname,
kind = 'struct',
scope = scope,
typedef_flag = 0,
pos = self.pos,
cname = struct_cname)
self.op_args_struct.defined_in_pxd = 1
self.op_args_struct.used = 1
exc_val = None
exc_check = 0
if self.exception_check == '+':
env.add_include_file('ios') # for std::ios_base::failure
env.add_include_file('new') # for std::bad_alloc
env.add_include_file('stdexcept')
env.add_include_file('typeinfo') # for std::bad_cast
if return_type.is_pyobject \
and (self.exception_value or self.exception_check) \
and self.exception_check != '+':
error(self.pos,
"Exception clause not allowed for function returning Python object")
else:
if self.exception_value:
self.exception_value.analyse_const_expression(env)
if self.exception_check == '+':
self.exception_value.analyse_types(env)
exc_val_type = self.exception_value.type
if not exc_val_type.is_error and \
not exc_val_type.is_pyobject and \
not (exc_val_type.is_cfunction and not exc_val_type.return_type.is_pyobject and len(exc_val_type.args)==0):
error(self.exception_value.pos,
"Exception value must be a Python exception or cdef function with no arguments.")
exc_val = self.exception_value
else:
self.exception_value = self.exception_value.coerce_to(return_type, env)
if self.exception_value.analyse_const_expression(env):
exc_val = self.exception_value.get_constant_c_result_code()
if exc_val is None:
raise InternalError("get_constant_c_result_code not implemented for %s" %
self.exception_value.__class__.__name__)
if not return_type.assignable_from(self.exception_value.type):
error(self.exception_value.pos,
"Exception value incompatible with function return type")
exc_check = self.exception_check
if return_type.is_cfunction:
error(self.pos,
"Function cannot return a function")
func_type = PyrexTypes.CFuncType(
return_type, func_type_args, self.has_varargs,
optional_arg_count = self.optional_arg_count,
exception_value = exc_val, exception_check = exc_check,
calling_convention = self.base.calling_convention,
nogil = self.nogil, with_gil = self.with_gil, is_overridable = self.overridable)
if self.optional_arg_count:
func_type.op_arg_struct = PyrexTypes.c_ptr_type(self.op_args_struct.type)
callspec = env.directives['callspec']
if callspec:
current = func_type.calling_convention
if current and current != callspec:
error(self.pos, "cannot have both '%s' and '%s' "
"calling conventions" % (current, callspec))
func_type.calling_convention = callspec
return self.base.analyse(func_type, env)
class CArgDeclNode(Node):
# Item in a function declaration argument list.
#
# base_type CBaseTypeNode
# declarator CDeclaratorNode
# not_none boolean Tagged with 'not None'
# or_none boolean Tagged with 'or None'
# accept_none boolean Resolved boolean for not_none/or_none
# default ExprNode or None
# default_value PyObjectConst constant for default value
# annotation ExprNode or None Py3 function arg annotation
# is_self_arg boolean Is the "self" arg of an extension type method
# is_type_arg boolean Is the "class" arg of an extension type classmethod
# is_kw_only boolean Is a keyword-only argument
child_attrs = ["base_type", "declarator", "default"]
is_self_arg = 0
is_type_arg = 0
is_generic = 1
kw_only = 0
not_none = 0
or_none = 0
type = None
name_declarator = None
default_value = None
annotation = None
def analyse(self, env, nonempty = 0, is_self_arg = False):
if is_self_arg:
self.base_type.is_self_arg = self.is_self_arg = True
if self.type is None:
            # The parser may misinterpret names as types...
# We fix that here.
if isinstance(self.declarator, CNameDeclaratorNode) and self.declarator.name == '':
if nonempty:
self.declarator.name = self.base_type.name
self.base_type.name = None
self.base_type.is_basic_c_type = False
could_be_name = True
else:
could_be_name = False
base_type = self.base_type.analyse(env, could_be_name = could_be_name)
if hasattr(self.base_type, 'arg_name') and self.base_type.arg_name:
self.declarator.name = self.base_type.arg_name
# The parser is unable to resolve the ambiguity of [] as part of the
# type (e.g. in buffers) or empty declarator (as with arrays).
            # This only arises for empty multi-dimensional arrays.
if (base_type.is_array
and isinstance(self.base_type, TemplatedTypeNode)
and isinstance(self.declarator, CArrayDeclaratorNode)):
declarator = self.declarator
while isinstance(declarator.base, CArrayDeclaratorNode):
declarator = declarator.base
declarator.base = self.base_type.array_declarator
base_type = base_type.base_type
return self.declarator.analyse(base_type, env, nonempty = nonempty)
else:
return self.name_declarator, self.type
def calculate_default_value_code(self, code):
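        # Return the C expression used for this argument's default value;
        # literal defaults are emitted inline, others use a cached default constant.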
if self.default_value is None:
if self.default:
if self.default.is_literal:
# will not output any code, just assign the result_code
self.default.generate_evaluation_code(code)
return self.type.cast_code(self.default.result())
self.default_value = code.get_argument_default_const(self.type)
return self.default_value
def annotate(self, code):
if self.default:
self.default.annotate(code)
class CBaseTypeNode(Node):
# Abstract base class for C base type nodes.
#
# Processing during analyse_declarations phase:
#
# analyse
# Returns the type.
pass
def analyse_as_type(self, env):
return self.analyse(env)
class CAnalysedBaseTypeNode(Node):
# type type
child_attrs = []
def analyse(self, env, could_be_name = False):
return self.type
class CSimpleBaseTypeNode(CBaseTypeNode):
# name string
# module_path [string] Qualifying name components
# is_basic_c_type boolean
# signed boolean
# longness integer
# complex boolean
# is_self_arg boolean Is self argument of C method
# ##is_type_arg boolean Is type argument of class method
child_attrs = []
arg_name = None # in case the argument name was interpreted as a type
module_path = []
is_basic_c_type = False
complex = False
def analyse(self, env, could_be_name = False):
# Return type descriptor.
#print "CSimpleBaseTypeNode.analyse: is_self_arg =", self.is_self_arg ###
type = None
if self.is_basic_c_type:
type = PyrexTypes.simple_c_type(self.signed, self.longness, self.name)
if not type:
error(self.pos, "Unrecognised type modifier combination")
elif self.name == "object" and not self.module_path:
type = py_object_type
elif self.name is None:
if self.is_self_arg and env.is_c_class_scope:
#print "CSimpleBaseTypeNode.analyse: defaulting to parent type" ###
type = env.parent_type
## elif self.is_type_arg and env.is_c_class_scope:
## type = Builtin.type_type
else:
type = py_object_type
else:
if self.module_path:
scope = env.find_imported_module(self.module_path, self.pos)
else:
scope = env
if scope:
if scope.is_c_class_scope:
scope = scope.global_scope()
entry = scope.lookup(self.name)
if entry and entry.is_type:
type = entry.type
elif could_be_name:
if self.is_self_arg and env.is_c_class_scope:
type = env.parent_type
## elif self.is_type_arg and env.is_c_class_scope:
## type = Builtin.type_type
else:
type = py_object_type
self.arg_name = self.name
else:
if self.templates:
if not self.name in self.templates:
error(self.pos, "'%s' is not a type identifier" % self.name)
type = PyrexTypes.TemplatePlaceholderType(self.name)
else:
error(self.pos, "'%s' is not a type identifier" % self.name)
if self.complex:
if not type.is_numeric or type.is_complex:
error(self.pos, "can only complexify c numeric types")
type = PyrexTypes.CComplexType(type)
type.create_declaration_utility_code(env)
elif type is Builtin.complex_type:
# Special case: optimise builtin complex type into C's
# double complex. The parser cannot do this (as for the
# normal scalar types) as the user may have redeclared the
# 'complex' type. Testing for the exact type here works.
type = PyrexTypes.c_double_complex_type
type.create_declaration_utility_code(env)
self.complex = True
if type:
return type
else:
return PyrexTypes.error_type
class MemoryViewSliceTypeNode(CBaseTypeNode):
child_attrs = ['base_type_node', 'axes']
def analyse(self, env, could_be_name = False):
base_type = self.base_type_node.analyse(env)
if base_type.is_error: return base_type
import MemoryView
try:
axes_specs = MemoryView.get_axes_specs(env, self.axes)
except CompileError, e:
error(e.position, e.message_only)
self.type = PyrexTypes.ErrorType()
return self.type
MemoryView.validate_memslice_dtype(self.pos, base_type)
self.type = PyrexTypes.MemoryViewSliceType(base_type, axes_specs)
self.use_memview_utilities(env)
return self.type
def use_memview_utilities(self, env):
import MemoryView
env.use_utility_code(MemoryView.view_utility_code)
class CNestedBaseTypeNode(CBaseTypeNode):
# For C++ classes that live inside other C++ classes.
# name string
# base_type CBaseTypeNode
child_attrs = ['base_type']
def analyse(self, env, could_be_name = None):
base_type = self.base_type.analyse(env)
if base_type is PyrexTypes.error_type:
return PyrexTypes.error_type
if not base_type.is_cpp_class:
error(self.pos, "'%s' is not a valid type scope" % base_type)
return PyrexTypes.error_type
type_entry = base_type.scope.lookup_here(self.name)
if not type_entry or not type_entry.is_type:
error(self.pos, "'%s.%s' is not a type identifier" % (base_type, self.name))
return PyrexTypes.error_type
return type_entry.type
class TemplatedTypeNode(CBaseTypeNode):
# After parsing:
# positional_args [ExprNode] List of positional arguments
# keyword_args DictNode Keyword arguments
# base_type_node CBaseTypeNode
# After analysis:
# type PyrexTypes.BufferType or PyrexTypes.CppClassType ...containing the right options
child_attrs = ["base_type_node", "positional_args",
"keyword_args", "dtype_node"]
dtype_node = None
name = None
def analyse(self, env, could_be_name = False, base_type = None):
if base_type is None:
base_type = self.base_type_node.analyse(env)
if base_type.is_error: return base_type
if base_type.is_cpp_class:
# Templated class
if self.keyword_args and self.keyword_args.key_value_pairs:
                error(self.pos, "c++ templates cannot take keyword arguments")
self.type = PyrexTypes.error_type
else:
template_types = []
for template_node in self.positional_args:
type = template_node.analyse_as_type(env)
if type is None:
error(template_node.pos, "unknown type in template argument")
return error_type
template_types.append(type)
self.type = base_type.specialize_here(self.pos, template_types)
elif base_type.is_pyobject:
# Buffer
import Buffer
options = Buffer.analyse_buffer_options(
self.pos,
env,
self.positional_args,
self.keyword_args,
base_type.buffer_defaults)
if sys.version_info[0] < 3:
# Py 2.x enforces byte strings as keyword arguments ...
options = dict([ (name.encode('ASCII'), value)
for name, value in options.items() ])
self.type = PyrexTypes.BufferType(base_type, **options)
else:
# Array
empty_declarator = CNameDeclaratorNode(self.pos, name="", cname=None)
if len(self.positional_args) > 1 or self.keyword_args.key_value_pairs:
error(self.pos, "invalid array declaration")
self.type = PyrexTypes.error_type
else:
# It would be nice to merge this class with CArrayDeclaratorNode,
# but arrays are part of the declaration, not the type...
if not self.positional_args:
dimension = None
else:
dimension = self.positional_args[0]
self.array_declarator = CArrayDeclaratorNode(self.pos,
base = empty_declarator,
dimension = dimension)
self.type = self.array_declarator.analyse(base_type, env)[1]
return self.type
class CComplexBaseTypeNode(CBaseTypeNode):
# base_type CBaseTypeNode
# declarator CDeclaratorNode
child_attrs = ["base_type", "declarator"]
def analyse(self, env, could_be_name = False):
base = self.base_type.analyse(env, could_be_name)
_, type = self.declarator.analyse(base, env)
return type
class CVarDefNode(StatNode):
# C variable definition or forward/extern function declaration.
#
# visibility 'private' or 'public' or 'extern'
# base_type CBaseTypeNode
# declarators [CDeclaratorNode]
# in_pxd boolean
# api boolean
# decorators [cython.locals(...)] or None
# directive_locals { string : NameNode } locals defined by cython.locals(...)
child_attrs = ["base_type", "declarators"]
decorators = None
directive_locals = None
def analyse_declarations(self, env, dest_scope = None):
if self.directive_locals is None:
self.directive_locals = {}
if not dest_scope:
dest_scope = env
self.dest_scope = dest_scope
base_type = self.base_type.analyse(env)
visibility = self.visibility
for declarator in self.declarators:
if isinstance(declarator, CFuncDeclaratorNode):
name_declarator, type = declarator.analyse(base_type, env, directive_locals=self.directive_locals)
else:
name_declarator, type = declarator.analyse(base_type, env)
if not type.is_complete():
if not (self.visibility == 'extern' and type.is_array or type.is_memoryviewslice):
error(declarator.pos,
"Variable type '%s' is incomplete" % type)
if self.visibility == 'extern' and type.is_pyobject:
error(declarator.pos,
"Python object cannot be declared extern")
name = name_declarator.name
cname = name_declarator.cname
if name == '':
error(declarator.pos, "Missing name in declaration.")
return
if type.is_cfunction:
entry = dest_scope.declare_cfunction(name, type, declarator.pos,
cname = cname, visibility = self.visibility,
in_pxd = self.in_pxd, api = self.api)
if entry is not None:
entry.directive_locals = copy.copy(self.directive_locals)
else:
if self.directive_locals:
error(self.pos, "Decorators can only be followed by functions")
entry = dest_scope.declare_var(name, type, declarator.pos,
cname = cname, visibility = visibility,
in_pxd = self.in_pxd, api = self.api, is_cdef = 1)
class CStructOrUnionDefNode(StatNode):
# name string
# cname string or None
# kind "struct" or "union"
# typedef_flag boolean
# visibility "public" or "private"
# api boolean
# in_pxd boolean
# attributes [CVarDefNode] or None
# entry Entry
# packed boolean
child_attrs = ["attributes"]
def declare(self, env, scope=None):
if self.visibility == 'extern' and self.packed and not scope:
error(self.pos, "Cannot declare extern struct as 'packed'")
self.entry = env.declare_struct_or_union(
self.name, self.kind, scope, self.typedef_flag, self.pos,
self.cname, visibility = self.visibility, api = self.api,
packed = self.packed)
def analyse_declarations(self, env):
scope = None
if self.attributes is not None:
scope = StructOrUnionScope(self.name)
self.declare(env, scope)
if self.attributes is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
for attr in self.attributes:
attr.analyse_declarations(env, scope)
if self.visibility != 'extern':
for attr in scope.var_entries:
type = attr.type
while type.is_array:
type = type.base_type
if type == self.entry.type:
error(attr.pos, "Struct cannot contain itself as a member.")
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
class CppClassNode(CStructOrUnionDefNode):
# name string
# cname string or None
# visibility "extern"
# in_pxd boolean
# attributes [CVarDefNode] or None
# entry Entry
# base_classes [string]
# templates [string] or None
def declare(self, env):
if self.templates is None:
template_types = None
else:
template_types = [PyrexTypes.TemplatePlaceholderType(template_name) for template_name in self.templates]
self.entry = env.declare_cpp_class(
self.name, None, self.pos,
self.cname, base_classes = [], visibility = self.visibility, templates = template_types)
def analyse_declarations(self, env):
scope = None
if self.attributes is not None:
scope = CppClassScope(self.name, env)
base_class_types = []
for base_class_name in self.base_classes:
base_class_entry = env.lookup(base_class_name)
if base_class_entry is None:
error(self.pos, "'%s' not found" % base_class_name)
elif not base_class_entry.is_type or not base_class_entry.type.is_cpp_class:
error(self.pos, "'%s' is not a cpp class type" % base_class_name)
else:
base_class_types.append(base_class_entry.type)
if self.templates is None:
template_types = None
else:
template_types = [PyrexTypes.TemplatePlaceholderType(template_name) for template_name in self.templates]
self.entry = env.declare_cpp_class(
self.name, scope, self.pos,
self.cname, base_class_types, visibility = self.visibility, templates = template_types)
if self.entry is None:
return
self.entry.is_cpp_class = 1
if self.attributes is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
for attr in self.attributes:
attr.analyse_declarations(scope)
class CEnumDefNode(StatNode):
# name string or None
# cname string or None
# items [CEnumDefItemNode]
# typedef_flag boolean
# visibility "public" or "private"
# api boolean
# in_pxd boolean
# entry Entry
child_attrs = ["items"]
def declare(self, env):
self.entry = env.declare_enum(self.name, self.pos,
cname = self.cname, typedef_flag = self.typedef_flag,
visibility = self.visibility, api = self.api)
def analyse_declarations(self, env):
if self.items is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
for item in self.items:
item.analyse_declarations(env, self.entry)
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
if self.visibility == 'public' or self.api:
temp = code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=True)
for item in self.entry.enum_values:
code.putln("%s = PyInt_FromLong(%s); %s" % (
temp,
item.cname,
code.error_goto_if_null(temp, item.pos)))
code.put_gotref(temp)
code.putln('if (__Pyx_SetAttrString(%s, "%s", %s) < 0) %s' % (
Naming.module_cname,
item.name,
temp,
code.error_goto(item.pos)))
code.put_decref_clear(temp, PyrexTypes.py_object_type)
code.funcstate.release_temp(temp)
class CEnumDefItemNode(StatNode):
# name string
# cname string or None
# value ExprNode or None
child_attrs = ["value"]
def analyse_declarations(self, env, enum_entry):
if self.value:
self.value.analyse_const_expression(env)
if not self.value.type.is_int:
self.value = self.value.coerce_to(PyrexTypes.c_int_type, env)
self.value.analyse_const_expression(env)
entry = env.declare_const(self.name, enum_entry.type,
self.value, self.pos, cname = self.cname,
visibility = enum_entry.visibility, api = enum_entry.api)
enum_entry.enum_values.append(entry)
class CTypeDefNode(StatNode):
# base_type CBaseTypeNode
# declarator CDeclaratorNode
# visibility "public" or "private"
# api boolean
# in_pxd boolean
child_attrs = ["base_type", "declarator"]
def analyse_declarations(self, env):
base = self.base_type.analyse(env)
name_declarator, type = self.declarator.analyse(base, env)
name = name_declarator.name
cname = name_declarator.cname
entry = env.declare_typedef(name, type, self.pos,
cname = cname, visibility = self.visibility, api = self.api)
if self.in_pxd and not env.in_cinclude:
entry.defined_in_pxd = 1
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
class FuncDefNode(StatNode, BlockNode):
# Base class for function definition nodes.
#
# return_type PyrexType
# #filename string C name of filename string const
# entry Symtab.Entry
# needs_closure boolean Whether or not this function has inner functions/classes/yield
# needs_outer_scope boolean Whether or not this function requires outer scope
# pymethdef_required boolean Force Python method struct generation
# directive_locals { string : NameNode } locals defined by cython.locals(...)
# star_arg PyArgDeclNode or None * argument
# starstar_arg PyArgDeclNode or None ** argument
py_func = None
assmt = None
needs_closure = False
needs_outer_scope = False
pymethdef_required = False
is_generator = False
is_generator_body = False
modifiers = []
star_arg = None
starstar_arg = None
def analyse_default_values(self, env):
genv = env.global_scope()
default_seen = 0
for arg in self.args:
if arg.default:
default_seen = 1
if arg.is_generic:
arg.default.analyse_types(env)
arg.default = arg.default.coerce_to(arg.type, genv)
else:
error(arg.pos,
"This argument cannot have a default value")
arg.default = None
elif arg.kw_only:
default_seen = 1
elif default_seen:
error(arg.pos, "Non-default argument following default argument")
def align_argument_type(self, env, arg):
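        # Reconcile the declared type of 'arg' with any type given for it via the
        # cython.locals() directive, reporting any mismatch as an error.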
directive_locals = self.directive_locals
type = arg.type
if arg.name in directive_locals:
type_node = directive_locals[arg.name]
other_type = type_node.analyse_as_type(env)
if other_type is None:
error(type_node.pos, "Not a type")
elif (type is not PyrexTypes.py_object_type
and not type.same_as(other_type)):
error(arg.base_type.pos, "Signature does not agree with previous declaration")
error(type_node.pos, "Previous declaration here")
else:
arg.type = other_type
return arg
def need_gil_acquisition(self, lenv):
return 0
def create_local_scope(self, env):
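        # Build the local variable scope for this function, using a ClosureScope
        # when the function needs a closure and a plain LocalScope otherwise.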
genv = env
while genv.is_py_class_scope or genv.is_c_class_scope:
genv = genv.outer_scope
if self.needs_closure:
lenv = ClosureScope(name=self.entry.name,
outer_scope = genv,
parent_scope = env,
scope_name=self.entry.cname)
else:
lenv = LocalScope(name=self.entry.name,
outer_scope=genv,
parent_scope=env)
lenv.return_type = self.return_type
type = self.entry.type
if type.is_cfunction:
lenv.nogil = type.nogil and not type.with_gil
self.local_scope = lenv
lenv.directives = env.directives
return lenv
def generate_function_body(self, env, code):
self.body.generate_execution_code(code)
def generate_function_definitions(self, env, code):
import Buffer, MemoryView
lenv = self.local_scope
if lenv.is_closure_scope and not lenv.is_passthrough:
outer_scope_cname = "%s->%s" % (Naming.cur_scope_cname,
Naming.outer_scope_cname)
else:
outer_scope_cname = Naming.outer_scope_cname
lenv.mangle_closure_cnames(outer_scope_cname)
# Generate closure function definitions
self.body.generate_function_definitions(lenv, code)
# generate lambda function definitions
self.generate_lambda_definitions(lenv, code)
is_getbuffer_slot = (self.entry.name == "__getbuffer__" and
self.entry.scope.is_c_class_scope)
is_releasebuffer_slot = (self.entry.name == "__releasebuffer__" and
self.entry.scope.is_c_class_scope)
is_buffer_slot = is_getbuffer_slot or is_releasebuffer_slot
if is_buffer_slot:
if 'cython_unused' not in self.modifiers:
self.modifiers = self.modifiers + ['cython_unused']
preprocessor_guard = None
if self.entry.is_special and not is_buffer_slot:
slot = TypeSlots.method_name_to_slot.get(self.entry.name)
if slot:
preprocessor_guard = slot.preprocessor_guard_code()
if (self.entry.name == '__long__' and
not self.entry.scope.lookup_here('__int__')):
preprocessor_guard = None
profile = code.globalstate.directives['profile']
if profile and lenv.nogil:
warning(self.pos, "Cannot profile nogil function.", 1)
profile = False
if profile:
code.globalstate.use_utility_code(profile_utility_code)
# Generate C code for header and body of function
code.enter_cfunc_scope()
code.return_from_error_cleanup_label = code.new_label()
# ----- Top-level constants used by this function
code.mark_pos(self.pos)
self.generate_cached_builtins_decls(lenv, code)
# ----- Function header
code.putln("")
if preprocessor_guard:
code.putln(preprocessor_guard)
with_pymethdef = (self.needs_assignment_synthesis(env, code) or
self.pymethdef_required)
if self.py_func:
self.py_func.generate_function_header(code,
with_pymethdef = with_pymethdef,
proto_only=True)
self.generate_function_header(code,
with_pymethdef = with_pymethdef)
# ----- Local variable declarations
# Find function scope
cenv = env
while cenv.is_py_class_scope or cenv.is_c_class_scope:
cenv = cenv.outer_scope
if self.needs_closure:
code.put(lenv.scope_class.type.declaration_code(Naming.cur_scope_cname))
code.putln(";")
elif self.needs_outer_scope:
if lenv.is_passthrough:
code.put(lenv.scope_class.type.declaration_code(Naming.cur_scope_cname))
code.putln(";")
code.put(cenv.scope_class.type.declaration_code(Naming.outer_scope_cname))
code.putln(";")
self.generate_argument_declarations(lenv, code)
for entry in lenv.var_entries:
if not entry.in_closure:
code.put_var_declaration(entry)
# Initialize the return variable __pyx_r
init = ""
if not self.return_type.is_void:
if self.return_type.is_pyobject:
init = " = NULL"
elif self.return_type.is_memoryviewslice:
init = "= {0, 0}"
code.putln(
"%s%s;" %
(self.return_type.declaration_code(Naming.retval_cname),
init))
tempvardecl_code = code.insertion_point()
self.generate_keyword_list(code)
if profile:
code.put_trace_declarations()
# ----- Extern library function declarations
lenv.generate_library_function_declarations(code)
# ----- GIL acquisition
acquire_gil = self.acquire_gil
        # See if we need to acquire the GIL just for the variable declarations
        # (i.e. a nogil function that contains a 'with gil' block)
acquire_gil_for_var_decls_only = (lenv.nogil and
lenv.has_with_gil_block)
use_refnanny = not lenv.nogil or acquire_gil_for_var_decls_only
if acquire_gil or acquire_gil_for_var_decls_only:
code.put_ensure_gil()
# ----- set up refnanny
if use_refnanny:
tempvardecl_code.put_declare_refcount_context()
code.put_setup_refcount_context(self.entry.name)
# ----- Automatic lead-ins for certain special functions
if is_getbuffer_slot:
self.getbuffer_init(code)
# ----- Create closure scope object
if self.needs_closure:
code.putln("%s = (%s)%s->tp_new(%s, %s, NULL);" % (
Naming.cur_scope_cname,
lenv.scope_class.type.declaration_code(''),
lenv.scope_class.type.typeptr_cname,
lenv.scope_class.type.typeptr_cname,
Naming.empty_tuple))
code.putln("if (unlikely(!%s)) {" % Naming.cur_scope_cname)
if is_getbuffer_slot:
self.getbuffer_error_cleanup(code)
if use_refnanny:
code.put_finish_refcount_context()
if acquire_gil_for_var_decls_only:
code.put_release_ensured_gil()
# FIXME: what if the error return value is a Python value?
code.putln("return %s;" % self.error_value())
code.putln("}")
code.put_gotref(Naming.cur_scope_cname)
# Note that it is unsafe to decref the scope at this point.
if self.needs_outer_scope:
code.putln("%s = (%s)%s;" % (
outer_scope_cname,
cenv.scope_class.type.declaration_code(''),
Naming.self_cname))
if lenv.is_passthrough:
            code.putln("%s = %s;" % (Naming.cur_scope_cname, outer_scope_cname))
elif self.needs_closure:
# inner closures own a reference to their outer parent
code.put_incref(outer_scope_cname, cenv.scope_class.type)
code.put_giveref(outer_scope_cname)
# ----- Trace function call
if profile:
# this looks a bit late, but if we don't get here due to a
            # fatal error beforehand, it's not really worth tracing
code.put_trace_call(self.entry.name, self.pos)
# ----- Fetch arguments
self.generate_argument_parsing_code(env, code)
# If an argument is assigned to in the body, we must
# incref it to properly keep track of refcounts.
is_cdef = isinstance(self, CFuncDefNode)
for entry in lenv.arg_entries:
if entry.type.is_pyobject:
if (acquire_gil or entry.assignments) and not entry.in_closure:
code.put_var_incref(entry)
            # Note: defaults are always increffed. For def functions, we
            #       acquire arguments from object conversion, so we have
            #       new references. If we are a cdef function, we need to
            #       incref our arguments.
if is_cdef and entry.type.is_memoryviewslice:
code.put_incref_memoryviewslice(entry.cname,
have_gil=not lenv.nogil)
# ----- Initialise local buffer auxiliary variables
for entry in lenv.var_entries + lenv.arg_entries:
if entry.type.is_buffer and entry.buffer_aux.buflocal_nd_var.used:
Buffer.put_init_vars(entry, code)
# ----- Initialise local memoryviewslices
for entry in lenv.var_entries:
if entry.visibility == "private" and not entry.used:
continue
# ----- Check and convert arguments
self.generate_argument_type_tests(code)
# ----- Acquire buffer arguments
for entry in lenv.arg_entries:
if entry.type.is_buffer:
Buffer.put_acquire_arg_buffer(entry, code, self.pos)
if acquire_gil_for_var_decls_only:
code.put_release_ensured_gil()
# -------------------------
# ----- Function body -----
# -------------------------
self.generate_function_body(env, code)
# ----- Default return value
code.putln("")
if self.return_type.is_pyobject:
#if self.return_type.is_extension_type:
# lhs = "(PyObject *)%s" % Naming.retval_cname
#else:
lhs = Naming.retval_cname
code.put_init_to_py_none(lhs, self.return_type)
else:
val = self.return_type.default_value
if val:
code.putln("%s = %s;" % (Naming.retval_cname, val))
# ----- Error cleanup
if code.error_label in code.labels_used:
code.put_goto(code.return_label)
code.put_label(code.error_label)
for cname, type in code.funcstate.all_managed_temps():
code.put_xdecref(cname, type, have_gil=not lenv.nogil)
# Clean up buffers -- this calls a Python function
# so need to save and restore error state
buffers_present = len(lenv.buffer_entries) > 0
memslice_entries = [e for e in lenv.entries.itervalues()
if e.type.is_memoryviewslice]
if buffers_present:
code.globalstate.use_utility_code(restore_exception_utility_code)
code.putln("{ PyObject *__pyx_type, *__pyx_value, *__pyx_tb;")
code.putln("__Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);")
for entry in lenv.buffer_entries:
Buffer.put_release_buffer_code(code, entry)
#code.putln("%s = 0;" % entry.cname)
code.putln("__Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}")
if self.return_type.is_memoryviewslice:
MemoryView.put_init_entry(Naming.retval_cname, code)
err_val = Naming.retval_cname
else:
err_val = self.error_value()
exc_check = self.caller_will_check_exceptions()
if err_val is not None or exc_check:
# TODO: Fix exception tracing (though currently unused by cProfile).
# code.globalstate.use_utility_code(get_exception_tuple_utility_code)
# code.put_trace_exception()
if lenv.nogil:
code.putln("{")
code.put_ensure_gil()
code.put_add_traceback(self.entry.qualified_name)
if lenv.nogil:
code.put_release_ensured_gil()
code.putln("}")
else:
warning(self.entry.pos, "Unraisable exception in function '%s'." \
% self.entry.qualified_name, 0)
format_tuple = (
self.entry.qualified_name,
Naming.clineno_cname,
Naming.lineno_cname,
Naming.filename_cname,
)
code.putln(
'__Pyx_WriteUnraisable("%s", %s, %s, %s);' % format_tuple)
env.use_utility_code(unraisable_exception_utility_code)
env.use_utility_code(restore_exception_utility_code)
default_retval = self.return_type.default_value
if err_val is None and default_retval:
err_val = default_retval
if err_val is not None:
code.putln("%s = %s;" % (Naming.retval_cname, err_val))
if is_getbuffer_slot:
self.getbuffer_error_cleanup(code)
# If we are using the non-error cleanup section we should
        # jump past it if we have an error. The if-test below determines
# whether this section is used.
if buffers_present or is_getbuffer_slot:
code.put_goto(code.return_from_error_cleanup_label)
# ----- Non-error return cleanup
code.put_label(code.return_label)
for entry in lenv.buffer_entries:
if entry.used:
Buffer.put_release_buffer_code(code, entry)
if is_getbuffer_slot:
self.getbuffer_normal_cleanup(code)
if self.return_type.is_memoryviewslice:
# See if our return value is uninitialized on non-error return
# import MemoryView
# MemoryView.err_if_nogil_initialized_check(self.pos, env)
cond = code.unlikely(self.return_type.error_condition(
Naming.retval_cname))
code.putln(
'if (%s) {' % cond)
if env.nogil:
code.put_ensure_gil()
code.putln(
'PyErr_SetString('
'PyExc_TypeError,'
'"Memoryview return value is not initialized");')
if env.nogil:
code.put_release_ensured_gil()
code.putln(
'}')
# ----- Return cleanup for both error and no-error return
code.put_label(code.return_from_error_cleanup_label)
for entry in lenv.var_entries:
if not entry.used or entry.in_closure:
continue
if entry.type.is_memoryviewslice:
code.put_xdecref_memoryviewslice(entry.cname,
have_gil=not lenv.nogil)
elif entry.type.is_pyobject:
code.put_var_decref(entry)
# Decref any increfed args
for entry in lenv.arg_entries:
if entry.type.is_pyobject:
if (acquire_gil or entry.assignments) and not entry.in_closure:
code.put_var_decref(entry)
if entry.type.is_memoryviewslice:
code.put_xdecref_memoryviewslice(entry.cname,
have_gil=not lenv.nogil)
if self.needs_closure:
code.put_decref(Naming.cur_scope_cname, lenv.scope_class.type)
# ----- Return
# This code is duplicated in ModuleNode.generate_module_init_func
if not lenv.nogil:
default_retval = self.return_type.default_value
err_val = self.error_value()
if err_val is None and default_retval:
err_val = default_retval
if self.return_type.is_pyobject:
code.put_xgiveref(self.return_type.as_pyobject(Naming.retval_cname))
if self.entry.is_special and self.entry.name == "__hash__":
# Returning -1 for __hash__ is supposed to signal an error
            # We do as Python instances do and coerce -1 into -2.
code.putln("if (unlikely(%s == -1) && !PyErr_Occurred()) %s = -2;" % (
Naming.retval_cname, Naming.retval_cname))
if profile:
if self.return_type.is_pyobject:
code.put_trace_return(Naming.retval_cname)
else:
code.put_trace_return("Py_None")
if not lenv.nogil:
            # GIL-holding function
code.put_finish_refcount_context()
if acquire_gil or acquire_gil_for_var_decls_only:
code.put_release_ensured_gil()
if not self.return_type.is_void:
code.putln("return %s;" % Naming.retval_cname)
code.putln("}")
if preprocessor_guard:
code.putln("#endif /*!(%s)*/" % preprocessor_guard)
# ----- Go back and insert temp variable declarations
tempvardecl_code.put_temp_declarations(code.funcstate)
if code.funcstate.should_declare_error_indicator:
# Initialize these variables to silence compiler warnings
tempvardecl_code.putln("int %s = 0;" % Naming.lineno_cname)
tempvardecl_code.putln("const char *%s = NULL;" %
Naming.filename_cname)
if code.c_line_in_traceback:
tempvardecl_code.putln("int %s = 0;" % Naming.clineno_cname)
# ----- Python version
code.exit_cfunc_scope()
if self.py_func:
self.py_func.generate_function_definitions(env, code)
self.generate_wrapper_functions(code)
def declare_argument(self, env, arg):
if arg.type.is_void:
error(arg.pos, "Invalid use of 'void'")
elif not arg.type.is_complete() and not (arg.type.is_array or arg.type.is_memoryviewslice):
error(arg.pos,
"Argument type '%s' is incomplete" % arg.type)
return env.declare_arg(arg.name, arg.type, arg.pos)
def generate_arg_type_test(self, arg, code):
# Generate type test for one argument.
if arg.type.typeobj_is_available():
code.globalstate.use_utility_code(arg_type_test_utility_code)
typeptr_cname = arg.type.typeptr_cname
arg_code = "((PyObject *)%s)" % arg.entry.cname
code.putln(
'if (unlikely(!__Pyx_ArgTypeTest(%s, %s, %d, "%s", %s))) %s' % (
arg_code,
typeptr_cname,
arg.accept_none,
arg.name,
arg.type.is_builtin_type,
code.error_goto(arg.pos)))
else:
error(arg.pos, "Cannot test type of extern C class "
"without type object name specification")
def generate_arg_none_check(self, arg, code):
# Generate None check for one argument.
code.putln('if (unlikely(((PyObject *)%s) == Py_None)) {' % arg.entry.cname)
code.putln('''PyErr_Format(PyExc_TypeError, "Argument '%s' must not be None"); %s''' % (
arg.name,
code.error_goto(arg.pos)))
code.putln('}')
def generate_wrapper_functions(self, code):
pass
def generate_execution_code(self, code):
# Evaluate and store argument default values
for arg in self.args:
default = arg.default
if default:
if not default.is_literal:
default.generate_evaluation_code(code)
default.make_owned_reference(code)
result = default.result_as(arg.type)
code.putln(
"%s = %s;" % (
arg.calculate_default_value_code(code),
result))
if arg.type.is_pyobject:
code.put_giveref(default.result())
default.generate_post_assignment_code(code)
default.free_temps(code)
# For Python class methods, create and store function object
if self.assmt:
self.assmt.generate_execution_code(code)
#
# Special code for the __getbuffer__ function
#
def getbuffer_init(self, code):
info = self.local_scope.arg_entries[1].cname
# Python 3.0 betas have a bug in memoryview which makes it call
# getbuffer with a NULL parameter. For now we work around this;
# the following block should be removed when this bug is fixed.
code.putln("if (%s != NULL) {" % info)
code.putln("%s->obj = Py_None; __Pyx_INCREF(Py_None);" % info)
code.put_giveref("%s->obj" % info) # Do not refnanny object within structs
code.putln("}")
def getbuffer_error_cleanup(self, code):
info = self.local_scope.arg_entries[1].cname
code.putln("if (%s != NULL && %s->obj != NULL) {"
% (info, info))
code.put_gotref("%s->obj" % info)
code.putln("__Pyx_DECREF(%s->obj); %s->obj = NULL;"
% (info, info))
code.putln("}")
def getbuffer_normal_cleanup(self, code):
info = self.local_scope.arg_entries[1].cname
code.putln("if (%s != NULL && %s->obj == Py_None) {" % (info, info))
code.put_gotref("Py_None")
code.putln("__Pyx_DECREF(Py_None); %s->obj = NULL;" % info)
code.putln("}")
class CFuncDefNode(FuncDefNode):
# C function definition.
#
# modifiers ['inline']
# visibility 'private' or 'public' or 'extern'
# base_type CBaseTypeNode
# declarator CDeclaratorNode
# body StatListNode
# api boolean
# decorators [DecoratorNode] list of decorators
#
# with_gil boolean Acquire GIL around body
# type CFuncType
# py_func wrapper for calling from Python
# overridable whether or not this is a cpdef function
# inline_in_pxd whether this is an inline function in a pxd file
child_attrs = ["base_type", "declarator", "body", "py_func"]
inline_in_pxd = False
decorators = None
directive_locals = None
override = None
def unqualified_name(self):
return self.entry.name
def analyse_declarations(self, env):
if self.directive_locals is None:
self.directive_locals = {}
self.directive_locals.update(env.directives['locals'])
base_type = self.base_type.analyse(env)
# The 2 here is because we need both function and argument names.
if isinstance(self.declarator, CFuncDeclaratorNode):
name_declarator, type = self.declarator.analyse(base_type, env,
nonempty = 2 * (self.body is not None),
directive_locals = self.directive_locals)
else:
name_declarator, type = self.declarator.analyse(base_type, env, nonempty = 2 * (self.body is not None))
if not type.is_cfunction:
error(self.pos,
"Suite attached to non-function declaration")
# Remember the actual type according to the function header
# written here, because the type in the symbol table entry
# may be different if we're overriding a C method inherited
# from the base type of an extension type.
self.type = type
type.is_overridable = self.overridable
declarator = self.declarator
while not hasattr(declarator, 'args'):
declarator = declarator.base
self.args = declarator.args
for formal_arg, type_arg in zip(self.args, type.args):
self.align_argument_type(env, type_arg)
formal_arg.type = type_arg.type
formal_arg.name = type_arg.name
formal_arg.cname = type_arg.cname
if type_arg.type.is_buffer and 'inline' in self.modifiers:
warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1)
name = name_declarator.name
cname = name_declarator.cname
self.entry = env.declare_cfunction(
name, type, self.pos,
cname = cname, visibility = self.visibility, api = self.api,
defining = self.body is not None, modifiers = self.modifiers)
self.entry.inline_func_in_pxd = self.inline_in_pxd
self.return_type = type.return_type
if self.return_type.is_array and self.visibility != 'extern':
error(self.pos,
"Function cannot return an array")
if self.overridable and not env.is_module_scope:
if len(self.args) < 1 or not self.args[0].type.is_pyobject:
# An error will be produced in the cdef function
self.overridable = False
if self.overridable:
import ExprNodes
py_func_body = self.call_self_node(is_module_scope = env.is_module_scope)
self.py_func = DefNode(pos = self.pos,
name = self.entry.name,
args = self.args,
star_arg = None,
starstar_arg = None,
doc = self.doc,
body = py_func_body,
is_wrapper = 1)
self.py_func.is_module_scope = env.is_module_scope
self.py_func.analyse_declarations(env)
self.entry.as_variable = self.py_func.entry
            # Reset the scope entry to the above cfunction
env.entries[name] = self.entry
if (not self.entry.is_final_cmethod and
(not env.is_module_scope or Options.lookup_module_cpdef)):
self.override = OverrideCheckNode(self.pos, py_func = self.py_func)
self.body = StatListNode(self.pos, stats=[self.override, self.body])
self.create_local_scope(env)
def call_self_node(self, omit_optional_args=0, is_module_scope=0):
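        # Construct the body of the Python wrapper of a cpdef function: a return
        # statement that forwards the call to the underlying C function
        # (optionally skipping dispatch).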
import ExprNodes
args = self.type.args
if omit_optional_args:
args = args[:len(args) - self.type.optional_arg_count]
arg_names = [arg.name for arg in args]
if is_module_scope:
cfunc = ExprNodes.NameNode(self.pos, name=self.entry.name)
else:
self_arg = ExprNodes.NameNode(self.pos, name=arg_names[0])
cfunc = ExprNodes.AttributeNode(self.pos, obj=self_arg, attribute=self.entry.name)
skip_dispatch = not is_module_scope or Options.lookup_module_cpdef
c_call = ExprNodes.SimpleCallNode(self.pos, function=cfunc, args=[ExprNodes.NameNode(self.pos, name=n) for n in arg_names[1-is_module_scope:]], wrapper_call=skip_dispatch)
return ReturnStatNode(pos=self.pos, return_type=PyrexTypes.py_object_type, value=c_call)
def declare_arguments(self, env):
for arg in self.type.args:
if not arg.name:
error(arg.pos, "Missing argument name")
self.declare_argument(env, arg)
def need_gil_acquisition(self, lenv):
return self.type.with_gil
def nogil_check(self, env):
type = self.type
with_gil = type.with_gil
if type.nogil and not with_gil:
if type.return_type.is_pyobject:
error(self.pos,
"Function with Python return type cannot be declared nogil")
for entry in self.local_scope.var_entries:
if entry.type.is_pyobject and not entry.in_with_gil_block:
error(self.pos, "Function declared nogil has Python locals or temporaries")
def analyse_expressions(self, env):
self.local_scope.directives = env.directives
if self.py_func is not None:
# this will also analyse the default values
self.py_func.analyse_expressions(env)
else:
self.analyse_default_values(env)
self.acquire_gil = self.need_gil_acquisition(self.local_scope)
def needs_assignment_synthesis(self, env, code=None):
return False
def generate_function_header(self, code, with_pymethdef, with_opt_args = 1, with_dispatch = 1, cname = None):
scope = self.local_scope
arg_decls = []
type = self.type
for arg in type.args[:len(type.args)-type.optional_arg_count]:
arg_decl = arg.declaration_code()
entry = scope.lookup(arg.name)
if not entry.cf_used:
arg_decl = 'CYTHON_UNUSED %s' % arg_decl
arg_decls.append(arg_decl)
if with_dispatch and self.overridable:
dispatch_arg = PyrexTypes.c_int_type.declaration_code(
Naming.skip_dispatch_cname)
if self.override:
arg_decls.append(dispatch_arg)
else:
arg_decls.append('CYTHON_UNUSED %s' % dispatch_arg)
if type.optional_arg_count and with_opt_args:
arg_decls.append(type.op_arg_struct.declaration_code(Naming.optional_args_cname))
if type.has_varargs:
arg_decls.append("...")
if not arg_decls:
arg_decls = ["void"]
if cname is None:
cname = self.entry.func_cname
entity = type.function_header_code(cname, ', '.join(arg_decls))
if self.entry.visibility == 'private':
storage_class = "static "
else:
storage_class = ""
dll_linkage = None
modifiers = ""
if 'inline' in self.modifiers:
self.modifiers[self.modifiers.index('inline')] = 'cython_inline'
if self.modifiers:
modifiers = "%s " % ' '.join(self.modifiers).upper()
header = self.return_type.declaration_code(entity, dll_linkage=dll_linkage)
#print (storage_class, modifiers, header)
code.putln("%s%s%s {" % (storage_class, modifiers, header))
def generate_argument_declarations(self, env, code):
scope = self.local_scope
for arg in self.args:
if arg.default:
entry = scope.lookup(arg.name)
if self.override or entry.cf_used:
result = arg.calculate_default_value_code(code)
code.putln('%s = %s;' % (
arg.type.declaration_code(arg.cname), result))
def generate_keyword_list(self, code):
pass
def generate_argument_parsing_code(self, env, code):
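        # Optional arguments of a cdef/cpdef function arrive packed in the
        # op_arg_struct (Naming.optional_args_cname); values that were
        # actually passed overwrite the defaults assigned by
        # generate_argument_declarations() above.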
i = 0
used = 0
if self.type.optional_arg_count:
scope = self.local_scope
code.putln('if (%s) {' % Naming.optional_args_cname)
for arg in self.args:
if arg.default:
entry = scope.lookup(arg.name)
if self.override or entry.cf_used:
code.putln('if (%s->%sn > %s) {' %
(Naming.optional_args_cname,
Naming.pyrex_prefix, i))
declarator = arg.declarator
while not hasattr(declarator, 'name'):
declarator = declarator.base
code.putln('%s = %s->%s;' %
(arg.cname, Naming.optional_args_cname,
self.type.opt_arg_cname(declarator.name)))
used += 1
i += 1
for _ in range(used):
code.putln('}')
code.putln('}')
def generate_argument_conversion_code(self, code):
pass
def generate_argument_type_tests(self, code):
# Generate type tests for args whose type in a parent
# class is a supertype of the declared type.
for arg in self.type.args:
if arg.needs_type_test:
self.generate_arg_type_test(arg, code)
elif arg.type.is_pyobject and not arg.accept_none:
self.generate_arg_none_check(arg, code)
def error_value(self):
if self.return_type.is_pyobject:
return "0"
else:
#return None
return self.entry.type.exception_value
def caller_will_check_exceptions(self):
return self.entry.type.exception_check
def generate_wrapper_functions(self, code):
# If the C signature of a function has changed, we need to generate
# wrappers to put in the slots here.
k = 0
entry = self.entry
func_type = entry.type
while entry.prev_entry is not None:
k += 1
entry = entry.prev_entry
entry.func_cname = "%s%swrap_%s" % (self.entry.func_cname, Naming.pyrex_prefix, k)
code.putln()
self.generate_function_header(code,
0,
with_dispatch = entry.type.is_overridable,
with_opt_args = entry.type.optional_arg_count,
cname = entry.func_cname)
if not self.return_type.is_void:
code.put('return ')
args = self.type.args
arglist = [arg.cname for arg in args[:len(args)-self.type.optional_arg_count]]
if entry.type.is_overridable:
arglist.append(Naming.skip_dispatch_cname)
elif func_type.is_overridable:
arglist.append('0')
if entry.type.optional_arg_count:
arglist.append(Naming.optional_args_cname)
elif func_type.optional_arg_count:
arglist.append('NULL')
code.putln('%s(%s);' % (self.entry.func_cname, ', '.join(arglist)))
code.putln('}')
class PyArgDeclNode(Node):
# Argument which must be a Python object (used
# for * and ** arguments).
#
# name string
# entry Symtab.Entry
# annotation ExprNode or None Py3 argument annotation
child_attrs = []
def generate_function_definitions(self, env, code):
self.entry.generate_function_definitions(env, code)
class DecoratorNode(Node):
# A decorator
#
# decorator NameNode or CallNode or AttributeNode
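    #
    # e.g. the "@staticmethod" or "@app.route('/x')" line above a def or
    # class statement; the decorating expression is stored in .decorator.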
child_attrs = ['decorator']
class DefNode(FuncDefNode):
# A Python function definition.
#
# name string the Python name of the function
# lambda_name string the internal name of a lambda 'function'
# decorators [DecoratorNode] list of decorators
# args [CArgDeclNode] formal arguments
# doc EncodedString or None
# body StatListNode
# return_type_annotation
# ExprNode or None the Py3 return type annotation
#
# The following subnode is constructed internally
# when the def statement is inside a Python class definition.
#
# assmt AssignmentNode Function construction/assignment
# py_cfunc_node PyCFunctionNode/InnerFunctionNode The PyCFunction to create and assign
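    #
    # Roughly speaking, "def f(self, x, y=1, *a, **kw)" is represented
    # with args holding the declarations of self, x and y, star_arg and
    # starstar_arg holding the *a/**kw arguments, and num_required_args
    # counting the arguments declared without a default value.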
child_attrs = ["args", "star_arg", "starstar_arg", "body", "decorators"]
lambda_name = None
assmt = None
num_kwonly_args = 0
num_required_kw_args = 0
reqd_kw_flags_cname = "0"
is_wrapper = 0
no_assignment_synthesis = 0
decorators = None
return_type_annotation = None
entry = None
acquire_gil = 0
self_in_stararg = 0
py_cfunc_node = None
doc = None
def __init__(self, pos, **kwds):
FuncDefNode.__init__(self, pos, **kwds)
k = rk = r = 0
for arg in self.args:
if arg.kw_only:
k += 1
if not arg.default:
rk += 1
if not arg.default:
r += 1
self.num_kwonly_args = k
self.num_required_kw_args = rk
self.num_required_args = r
def as_cfunction(self, cfunc=None, scope=None, overridable=True):
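        # Builds an equivalent CFuncDefNode for this def function, e.g.
        # when it has to be compiled as a cdef/cpdef function because a
        # matching C signature was declared elsewhere (such as in a .pxd
        # file); see also is_cdef_func_compatible() below.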
if self.star_arg:
error(self.star_arg.pos, "cdef function cannot have star argument")
if self.starstar_arg:
error(self.starstar_arg.pos, "cdef function cannot have starstar argument")
if cfunc is None:
cfunc_args = []
for formal_arg in self.args:
name_declarator, type = formal_arg.analyse(scope, nonempty=1)
cfunc_args.append(PyrexTypes.CFuncTypeArg(name = name_declarator.name,
cname = None,
type = py_object_type,
pos = formal_arg.pos))
cfunc_type = PyrexTypes.CFuncType(return_type = py_object_type,
args = cfunc_args,
has_varargs = False,
exception_value = None,
exception_check = False,
nogil = False,
with_gil = False,
is_overridable = overridable)
cfunc = CVarDefNode(self.pos, type=cfunc_type)
else:
if scope is None:
scope = cfunc.scope
cfunc_type = cfunc.type
if len(self.args) != len(cfunc_type.args) or cfunc_type.has_varargs:
error(self.pos, "wrong number of arguments")
error(cfunc.pos, "previous declaration here")
for i, (formal_arg, type_arg) in enumerate(zip(self.args, cfunc_type.args)):
name_declarator, type = formal_arg.analyse(scope, nonempty=1,
is_self_arg = (i == 0 and scope.is_c_class_scope))
if type is None or type is PyrexTypes.py_object_type:
formal_arg.type = type_arg.type
formal_arg.name_declarator = name_declarator
import ExprNodes
if cfunc_type.exception_value is None:
exception_value = None
else:
exception_value = ExprNodes.ConstNode(self.pos, value=cfunc_type.exception_value, type=cfunc_type.return_type)
declarator = CFuncDeclaratorNode(self.pos,
base = CNameDeclaratorNode(self.pos, name=self.name, cname=None),
args = self.args,
has_varargs = False,
exception_check = cfunc_type.exception_check,
exception_value = exception_value,
with_gil = cfunc_type.with_gil,
nogil = cfunc_type.nogil)
return CFuncDefNode(self.pos,
modifiers = [],
base_type = CAnalysedBaseTypeNode(self.pos, type=cfunc_type.return_type),
declarator = declarator,
body = self.body,
doc = self.doc,
overridable = cfunc_type.is_overridable,
type = cfunc_type,
with_gil = cfunc_type.with_gil,
nogil = cfunc_type.nogil,
visibility = 'private',
api = False,
directive_locals = getattr(cfunc, 'directive_locals', {}))
def is_cdef_func_compatible(self):
"""Determines if the function's signature is compatible with a
cdef function. This can be used before calling
.as_cfunction() to see if that will be successful.
"""
if self.needs_closure:
return False
if self.star_arg or self.starstar_arg:
return False
return True
def analyse_declarations(self, env):
self.is_classmethod = self.is_staticmethod = False
if self.decorators:
for decorator in self.decorators:
func = decorator.decorator
if func.is_name:
self.is_classmethod |= func.name == 'classmethod'
self.is_staticmethod |= func.name == 'staticmethod'
if self.is_classmethod and env.lookup_here('classmethod'):
# classmethod() was overridden - not much we can do here ...
self.is_classmethod = False
if self.is_staticmethod and env.lookup_here('staticmethod'):
# staticmethod() was overridden - not much we can do here ...
self.is_staticmethod = False
if self.name == '__new__' and env.is_py_class_scope:
self.is_staticmethod = 1
self.analyse_argument_types(env)
if self.name == '<lambda>':
self.declare_lambda_function(env)
else:
self.declare_pyfunction(env)
self.analyse_signature(env)
self.return_type = self.entry.signature.return_type()
self.create_local_scope(env)
def analyse_argument_types(self, env):
directive_locals = self.directive_locals = env.directives['locals']
allow_none_for_extension_args = env.directives['allow_none_for_extension_args']
for arg in self.args:
if hasattr(arg, 'name'):
name_declarator = None
else:
base_type = arg.base_type.analyse(env)
name_declarator, type = \
arg.declarator.analyse(base_type, env)
arg.name = name_declarator.name
arg.type = type
self.align_argument_type(env, arg)
if name_declarator and name_declarator.cname:
error(self.pos,
"Python function argument cannot have C name specification")
arg.type = arg.type.as_argument_type()
arg.hdr_type = None
arg.needs_conversion = 0
arg.needs_type_test = 0
arg.is_generic = 1
if arg.type.is_pyobject:
if arg.or_none:
arg.accept_none = True
elif arg.not_none:
arg.accept_none = False
elif arg.type.is_extension_type or arg.type.is_builtin_type:
if arg.default and arg.default.constant_result is None:
# special case: def func(MyType obj = None)
arg.accept_none = True
else:
# default depends on compiler directive
arg.accept_none = allow_none_for_extension_args
else:
# probably just a plain 'object'
arg.accept_none = True
else:
arg.accept_none = True # won't be used, but must be there
if arg.not_none:
error(arg.pos, "Only Python type arguments can have 'not None'")
if arg.or_none:
error(arg.pos, "Only Python type arguments can have 'or None'")
def analyse_signature(self, env):
if self.entry.is_special:
if self.decorators:
error(self.pos, "special functions of cdef classes cannot have decorators")
self.entry.trivial_signature = len(self.args) == 1 and not (self.star_arg or self.starstar_arg)
elif not env.directives['always_allow_keywords'] and not (self.star_arg or self.starstar_arg):
# Use the simpler calling signature for zero- and one-argument functions.
if self.entry.signature is TypeSlots.pyfunction_signature:
if len(self.args) == 0:
self.entry.signature = TypeSlots.pyfunction_noargs
elif len(self.args) == 1:
if self.args[0].default is None and not self.args[0].kw_only:
self.entry.signature = TypeSlots.pyfunction_onearg
elif self.entry.signature is TypeSlots.pymethod_signature:
if len(self.args) == 1:
self.entry.signature = TypeSlots.unaryfunc
elif len(self.args) == 2:
if self.args[1].default is None and not self.args[1].kw_only:
self.entry.signature = TypeSlots.ibinaryfunc
sig = self.entry.signature
nfixed = sig.num_fixed_args()
if sig is TypeSlots.pymethod_signature and nfixed == 1 \
and len(self.args) == 0 and self.star_arg:
# this is the only case where a diverging number of
# arguments is not an error - when we have no explicit
# 'self' parameter as in method(*args)
sig = self.entry.signature = TypeSlots.pyfunction_signature # self is not 'really' used
self.self_in_stararg = 1
nfixed = 0
for i in range(min(nfixed, len(self.args))):
arg = self.args[i]
arg.is_generic = 0
if sig.is_self_arg(i) and not self.is_staticmethod:
if self.is_classmethod:
arg.is_type_arg = 1
arg.hdr_type = arg.type = Builtin.type_type
else:
arg.is_self_arg = 1
arg.hdr_type = arg.type = env.parent_type
arg.needs_conversion = 0
else:
arg.hdr_type = sig.fixed_arg_type(i)
if not arg.type.same_as(arg.hdr_type):
if arg.hdr_type.is_pyobject and arg.type.is_pyobject:
arg.needs_type_test = 1
else:
arg.needs_conversion = 1
if arg.needs_conversion:
arg.hdr_cname = Naming.arg_prefix + arg.name
else:
arg.hdr_cname = Naming.var_prefix + arg.name
if nfixed > len(self.args):
self.bad_signature()
return
elif nfixed < len(self.args):
if not sig.has_generic_args:
self.bad_signature()
for arg in self.args:
if arg.is_generic and \
(arg.type.is_extension_type or arg.type.is_builtin_type):
arg.needs_type_test = 1
def bad_signature(self):
sig = self.entry.signature
expected_str = "%d" % sig.num_fixed_args()
if sig.has_generic_args:
expected_str = expected_str + " or more"
name = self.name
if name.startswith("__") and name.endswith("__"):
desc = "Special method"
else:
desc = "Method"
error(self.pos,
"%s %s has wrong number of arguments "
"(%d declared, %s expected)" % (
desc, self.name, len(self.args), expected_str))
def signature_has_nongeneric_args(self):
argcount = len(self.args)
if argcount == 0 or (
argcount == 1 and (self.args[0].is_self_arg or
self.args[0].is_type_arg)):
return 0
return 1
def signature_has_generic_args(self):
return self.entry.signature.has_generic_args
def declare_pyfunction(self, env):
#print "DefNode.declare_pyfunction:", self.name, "in", env ###
name = self.name
entry = env.lookup_here(name)
if entry:
if entry.is_final_cmethod and not env.parent_type.is_final_type:
error(self.pos, "Only final types can have final Python (def/cpdef) methods")
if (entry.type.is_cfunction and not entry.is_builtin_cmethod
and not self.is_wrapper):
warning(self.pos, "Overriding cdef method with def method.", 5)
entry = env.declare_pyfunction(name, self.pos, allow_redefine=not self.is_wrapper)
self.entry = entry
prefix = env.next_id(env.scope_prefix)
entry.func_cname = Naming.pyfunc_prefix + prefix + name
entry.pymethdef_cname = Naming.pymethdef_prefix + prefix + name
if Options.docstrings:
entry.doc = embed_position(self.pos, self.doc)
entry.doc_cname = Naming.funcdoc_prefix + prefix + name
if entry.is_special:
if entry.name in TypeSlots.invisible or not entry.doc or (entry.name in '__getattr__' and env.directives['fast_getattr']):
entry.wrapperbase_cname = None
else:
entry.wrapperbase_cname = Naming.wrapperbase_prefix + prefix + name
else:
entry.doc = None
def declare_lambda_function(self, env):
entry = env.declare_lambda_function(self.lambda_name, self.pos)
entry.doc = None
self.entry = entry
def declare_arguments(self, env):
for arg in self.args:
if not arg.name:
error(arg.pos, "Missing argument name")
if arg.needs_conversion:
arg.entry = env.declare_var(arg.name, arg.type, arg.pos)
if arg.type.is_pyobject:
arg.entry.init = "0"
else:
arg.entry = self.declare_argument(env, arg)
arg.entry.is_arg = 1
arg.entry.used = 1
arg.entry.is_self_arg = arg.is_self_arg
if arg.hdr_type:
if arg.is_self_arg or arg.is_type_arg or \
(arg.type.is_extension_type and not arg.hdr_type.is_extension_type):
arg.entry.is_declared_generic = 1
self.declare_python_arg(env, self.star_arg)
self.declare_python_arg(env, self.starstar_arg)
def declare_python_arg(self, env, arg):
if arg:
if env.directives['infer_types'] != False:
type = PyrexTypes.unspecified_type
else:
type = py_object_type
entry = env.declare_var(arg.name, type, arg.pos)
entry.is_arg = 1
entry.used = 1
entry.init = "0"
entry.xdecref_cleanup = 1
arg.entry = entry
def analyse_expressions(self, env):
self.local_scope.directives = env.directives
self.analyse_default_values(env)
if self.needs_assignment_synthesis(env):
# Shouldn't we be doing this at the module level too?
self.synthesize_assignment_node(env)
elif self.decorators:
for decorator in self.decorators[::-1]:
decorator.decorator.analyse_expressions(env)
def needs_assignment_synthesis(self, env, code=None):
if self.no_assignment_synthesis:
return False
        # Should enable for module level as well; that will require more testing...
if self.entry.is_anonymous:
return True
if env.is_module_scope:
if code is None:
return env.directives['binding']
else:
return code.globalstate.directives['binding']
return env.is_py_class_scope or env.is_closure_scope
def synthesize_assignment_node(self, env):
import ExprNodes
genv = env
while genv.is_py_class_scope or genv.is_c_class_scope:
genv = genv.outer_scope
if genv.is_closure_scope:
rhs = self.py_cfunc_node = ExprNodes.InnerFunctionNode(
self.pos, pymethdef_cname = self.entry.pymethdef_cname,
code_object = ExprNodes.CodeObjectNode(self))
else:
rhs = self.py_cfunc_node = ExprNodes.PyCFunctionNode(
self.pos, pymethdef_cname = self.entry.pymethdef_cname,
binding = env.directives['binding'],
code_object = ExprNodes.CodeObjectNode(self))
if env.is_py_class_scope:
if not self.is_staticmethod and not self.is_classmethod:
rhs.binding = True
else:
rhs.binding = False
if self.decorators:
for decorator in self.decorators[::-1]:
rhs = ExprNodes.SimpleCallNode(
decorator.pos,
function = decorator.decorator,
args = [rhs])
self.assmt = SingleAssignmentNode(self.pos,
lhs = ExprNodes.NameNode(self.pos, name = self.name),
rhs = rhs)
self.assmt.analyse_declarations(env)
self.assmt.analyse_expressions(env)
def generate_function_header(self, code, with_pymethdef, proto_only=0):
arg_code_list = []
sig = self.entry.signature
if sig.has_dummy_arg or self.self_in_stararg:
arg_code_list.append(
"PyObject *%s" % Naming.self_cname)
for arg in self.args:
if not arg.is_generic:
if arg.is_self_arg or arg.is_type_arg:
arg_code_list.append("PyObject *%s" % arg.hdr_cname)
else:
decl = arg.hdr_type.declaration_code(arg.hdr_cname)
entry = self.local_scope.lookup(arg.name)
if not entry.cf_used:
arg_code_list.append('CYTHON_UNUSED ' + decl)
else:
arg_code_list.append(decl)
if not self.entry.is_special and sig.method_flags() == [TypeSlots.method_noargs]:
arg_code_list.append("CYTHON_UNUSED PyObject *unused")
if (self.entry.scope.is_c_class_scope and self.entry.name == "__ipow__"):
arg_code_list.append("CYTHON_UNUSED PyObject *unused")
if sig.has_generic_args:
arg_code_list.append(
"PyObject *%s, PyObject *%s"
% (Naming.args_cname, Naming.kwds_cname))
arg_code = ", ".join(arg_code_list)
dc = self.return_type.declaration_code(self.entry.func_cname)
mf = " ".join(self.modifiers).upper()
if mf: mf += " "
header = "static %s%s(%s)" % (mf, dc, arg_code)
code.putln("%s; /*proto*/" % header)
if proto_only:
return
if (Options.docstrings and self.entry.doc and
not self.entry.scope.is_property_scope and
(not self.entry.is_special or self.entry.wrapperbase_cname)):
docstr = self.entry.doc
if docstr.is_unicode:
docstr = docstr.utf8encode()
code.putln(
'static char %s[] = "%s";' % (
self.entry.doc_cname,
split_string_literal(escape_byte_string(docstr))))
if self.entry.is_special:
code.putln(
"struct wrapperbase %s;" % self.entry.wrapperbase_cname)
if with_pymethdef:
code.put(
"static PyMethodDef %s = " %
self.entry.pymethdef_cname)
code.put_pymethoddef(self.entry, ";", allow_skip=False)
code.putln("%s {" % header)
def generate_argument_declarations(self, env, code):
for arg in self.args:
if arg.is_generic: # or arg.needs_conversion:
if arg.needs_conversion:
code.putln("PyObject *%s = 0;" % arg.hdr_cname)
elif not arg.entry.in_closure:
code.put_var_declaration(arg.entry)
def generate_keyword_list(self, code):
if self.signature_has_generic_args() and \
self.signature_has_nongeneric_args():
code.put(
"static PyObject **%s[] = {" %
Naming.pykwdlist_cname)
for arg in self.args:
if arg.is_generic:
pystring_cname = code.intern_identifier(arg.name)
code.put('&%s,' % pystring_cname)
code.putln("0};")
def generate_argument_parsing_code(self, env, code):
# Generate fast equivalent of PyArg_ParseTuple call for
# generic arguments, if any, including args/kwargs
if self.entry.signature.has_dummy_arg and not self.self_in_stararg:
# get rid of unused argument warning
code.putln("%s = %s;" % (Naming.self_cname, Naming.self_cname))
old_error_label = code.new_error_label()
our_error_label = code.error_label
end_label = code.new_label("argument_unpacking_done")
has_kwonly_args = self.num_kwonly_args > 0
has_star_or_kw_args = self.star_arg is not None \
or self.starstar_arg is not None or has_kwonly_args
for arg in self.args:
if not arg.type.is_pyobject:
if not arg.type.create_from_py_utility_code(env):
pass # will fail later
elif arg.is_self_arg and arg.entry.in_closure:
# must store 'self' in the closure explicitly for extension types
self.generate_arg_assignment(arg, arg.hdr_cname, code)
if not self.signature_has_generic_args():
if has_star_or_kw_args:
error(self.pos, "This method cannot have * or keyword arguments")
self.generate_argument_conversion_code(code)
elif not self.signature_has_nongeneric_args():
# func(*args) or func(**kw) or func(*args, **kw)
self.generate_stararg_copy_code(code)
else:
positional_args = []
kw_only_args = []
for arg in self.args:
arg_entry = arg.entry
if arg.is_generic:
if arg.default:
if not arg.is_self_arg and not arg.is_type_arg:
if arg.kw_only:
kw_only_args.append(arg)
else:
positional_args.append(arg)
elif arg.kw_only:
kw_only_args.append(arg)
elif not arg.is_self_arg and not arg.is_type_arg:
positional_args.append(arg)
self.generate_tuple_and_keyword_parsing_code(
positional_args, kw_only_args, end_label, code)
code.error_label = old_error_label
if code.label_used(our_error_label):
if not code.label_used(end_label):
code.put_goto(end_label)
code.put_label(our_error_label)
if has_star_or_kw_args:
self.generate_arg_decref(self.star_arg, code)
if self.starstar_arg:
if self.starstar_arg.entry.xdecref_cleanup:
code.put_var_xdecref_clear(self.starstar_arg.entry)
else:
code.put_var_decref_clear(self.starstar_arg.entry)
code.put_add_traceback(self.entry.qualified_name)
# The arguments are put into the closure one after the
# other, so when type errors are found, all references in
# the closure instance must be properly ref-counted to
# facilitate generic closure instance deallocation. In
# the case of an argument type error, it's best to just
# DECREF+clear the already handled references, as this
# frees their references as early as possible.
for arg in self.args:
if arg.type.is_pyobject and arg.entry.in_closure:
code.put_var_xdecref_clear(arg.entry)
if self.needs_closure:
code.put_decref(Naming.cur_scope_cname, self.local_scope.scope_class.type)
code.put_finish_refcount_context()
code.putln("return %s;" % self.error_value())
if code.label_used(end_label):
code.put_label(end_label)
# fix refnanny view on closure variables here, instead of
# doing it separately for each arg parsing special case
if self.star_arg and self.star_arg.entry.in_closure:
code.put_var_giveref(self.star_arg.entry)
if self.starstar_arg and self.starstar_arg.entry.in_closure:
code.put_var_giveref(self.starstar_arg.entry)
for arg in self.args:
if arg.type.is_pyobject and arg.entry.in_closure:
code.put_var_giveref(arg.entry)
def generate_arg_assignment(self, arg, item, code, incref_closure=True):
if arg.type.is_pyobject:
if arg.is_generic:
item = PyrexTypes.typecast(arg.type, PyrexTypes.py_object_type, item)
entry = arg.entry
if incref_closure and entry.in_closure:
code.put_incref(item, PyrexTypes.py_object_type)
code.putln("%s = %s;" % (entry.cname, item))
else:
func = arg.type.from_py_function
if func:
code.putln("%s = %s(%s); %s" % (
arg.entry.cname,
func,
item,
code.error_goto_if(arg.type.error_condition(arg.entry.cname), arg.pos)))
else:
error(arg.pos, "Cannot convert Python object argument to type '%s'" % arg.type)
def generate_arg_xdecref(self, arg, code):
if arg:
code.put_var_xdecref_clear(arg.entry)
def generate_arg_decref(self, arg, code):
if arg:
code.put_var_decref_clear(arg.entry)
def generate_stararg_copy_code(self, code):
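        # Fast path for signatures whose only generic arguments are *args
        # and/or **kwargs: no per-argument unpacking is needed, so the
        # incoming args tuple is reused (or rejected) and the kwargs dict
        # is checked and copied as a whole.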
if not self.star_arg:
code.globalstate.use_utility_code(raise_argtuple_invalid_utility_code)
code.putln("if (unlikely(PyTuple_GET_SIZE(%s) > 0)) {" %
Naming.args_cname)
code.put('__Pyx_RaiseArgtupleInvalid("%s", 1, 0, 0, PyTuple_GET_SIZE(%s)); return %s;' % (
self.name, Naming.args_cname, self.error_value()))
code.putln("}")
if self.starstar_arg:
if self.star_arg:
kwarg_check = "unlikely(%s)" % Naming.kwds_cname
else:
kwarg_check = "%s" % Naming.kwds_cname
else:
kwarg_check = "unlikely(%s) && unlikely(PyDict_Size(%s) > 0)" % (
Naming.kwds_cname, Naming.kwds_cname)
code.globalstate.use_utility_code(keyword_string_check_utility_code)
code.putln(
"if (%s && unlikely(!__Pyx_CheckKeywordStrings(%s, \"%s\", %d))) return %s;" % (
kwarg_check, Naming.kwds_cname, self.name,
bool(self.starstar_arg), self.error_value()))
if self.starstar_arg:
code.putln("%s = (%s) ? PyDict_Copy(%s) : PyDict_New();" % (
self.starstar_arg.entry.cname,
Naming.kwds_cname,
Naming.kwds_cname))
code.putln("if (unlikely(!%s)) return %s;" % (
self.starstar_arg.entry.cname, self.error_value()))
self.starstar_arg.entry.xdecref_cleanup = 0
code.put_gotref(self.starstar_arg.entry.cname)
if self.self_in_stararg:
# need to create a new tuple with 'self' inserted as first item
code.put("%s = PyTuple_New(PyTuple_GET_SIZE(%s)+1); if (unlikely(!%s)) " % (
self.star_arg.entry.cname,
Naming.args_cname,
self.star_arg.entry.cname))
if self.starstar_arg:
code.putln("{")
code.put_decref_clear(self.starstar_arg.entry.cname, py_object_type)
code.putln("return %s;" % self.error_value())
code.putln("}")
else:
code.putln("return %s;" % self.error_value())
code.put_gotref(self.star_arg.entry.cname)
code.put_incref(Naming.self_cname, py_object_type)
code.put_giveref(Naming.self_cname)
code.putln("PyTuple_SET_ITEM(%s, 0, %s);" % (
self.star_arg.entry.cname, Naming.self_cname))
temp = code.funcstate.allocate_temp(PyrexTypes.c_py_ssize_t_type, manage_ref=False)
code.putln("for (%s=0; %s < PyTuple_GET_SIZE(%s); %s++) {" % (
temp, temp, Naming.args_cname, temp))
code.putln("PyObject* item = PyTuple_GET_ITEM(%s, %s);" % (
Naming.args_cname, temp))
code.put_incref("item", py_object_type)
code.put_giveref("item")
code.putln("PyTuple_SET_ITEM(%s, %s+1, item);" % (
self.star_arg.entry.cname, temp))
code.putln("}")
code.funcstate.release_temp(temp)
self.star_arg.entry.xdecref_cleanup = 0
elif self.star_arg:
code.put_incref(Naming.args_cname, py_object_type)
code.putln("%s = %s;" % (
self.star_arg.entry.cname,
Naming.args_cname))
self.star_arg.entry.xdecref_cleanup = 0
def generate_tuple_and_keyword_parsing_code(self, positional_args,
kw_only_args, success_label, code):
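        # Unpacks the incoming args tuple and kwargs dict into the local
        # 'values' array of borrowed references (covering positional,
        # keyword-only and star arguments), then converts and assigns the
        # values to the target variables further down.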
argtuple_error_label = code.new_label("argtuple_error")
min_positional_args = self.num_required_args - self.num_required_kw_args
if len(self.args) > 0 and (self.args[0].is_self_arg or self.args[0].is_type_arg):
min_positional_args -= 1
max_positional_args = len(positional_args)
has_fixed_positional_count = not self.star_arg and \
min_positional_args == max_positional_args
has_kw_only_args = bool(kw_only_args)
if self.num_required_kw_args:
code.globalstate.use_utility_code(raise_keyword_required_utility_code)
if self.starstar_arg or self.star_arg:
self.generate_stararg_init_code(max_positional_args, code)
# Before being converted and assigned to the target variables,
# borrowed references to all unpacked argument values are
        # collected into a local PyObject* array, regardless of whether
        # they were taken from default arguments, positional arguments or
# keyword arguments.
code.putln('{')
all_args = tuple(positional_args) + tuple(kw_only_args)
self.generate_argument_values_setup_code(
all_args, max_positional_args, argtuple_error_label, code)
# --- optimised code when we receive keyword arguments
code.putln("if (%s(%s)) {" % (
(self.num_required_kw_args > 0) and "likely" or "unlikely",
Naming.kwds_cname))
self.generate_keyword_unpacking_code(
min_positional_args, max_positional_args,
has_fixed_positional_count, has_kw_only_args,
all_args, argtuple_error_label, code)
# --- optimised code when we do not receive any keyword arguments
if (self.num_required_kw_args and min_positional_args > 0) or min_positional_args == max_positional_args:
# Python raises arg tuple related errors first, so we must
# check the length here
if min_positional_args == max_positional_args and not self.star_arg:
compare = '!='
else:
compare = '<'
code.putln('} else if (PyTuple_GET_SIZE(%s) %s %d) {' % (
Naming.args_cname, compare, min_positional_args))
code.put_goto(argtuple_error_label)
if self.num_required_kw_args:
# pure error case: keywords required but not passed
if max_positional_args > min_positional_args and not self.star_arg:
code.putln('} else if (PyTuple_GET_SIZE(%s) > %d) {' % (
Naming.args_cname, max_positional_args))
code.put_goto(argtuple_error_label)
code.putln('} else {')
for i, arg in enumerate(kw_only_args):
if not arg.default:
pystring_cname = code.intern_identifier(arg.name)
# required keyword-only argument missing
code.put('__Pyx_RaiseKeywordRequired("%s", %s); ' % (
self.name,
pystring_cname))
code.putln(code.error_goto(self.pos))
break
else:
# optimised tuple unpacking code
code.putln('} else {')
if min_positional_args == max_positional_args:
# parse the exact number of positional arguments from
# the args tuple
for i, arg in enumerate(positional_args):
code.putln("values[%d] = PyTuple_GET_ITEM(%s, %d);" % (i, Naming.args_cname, i))
else:
# parse the positional arguments from the variable length
# args tuple and reject illegal argument tuple sizes
code.putln('switch (PyTuple_GET_SIZE(%s)) {' % Naming.args_cname)
if self.star_arg:
code.putln('default:')
reversed_args = list(enumerate(positional_args))[::-1]
for i, arg in reversed_args:
if i >= min_positional_args-1:
code.put('case %2d: ' % (i+1))
code.putln("values[%d] = PyTuple_GET_ITEM(%s, %d);" % (i, Naming.args_cname, i))
if min_positional_args == 0:
code.put('case 0: ')
code.putln('break;')
if self.star_arg:
if min_positional_args:
for i in range(min_positional_args-1, -1, -1):
code.putln('case %2d:' % i)
code.put_goto(argtuple_error_label)
else:
code.put('default: ')
code.put_goto(argtuple_error_label)
code.putln('}')
code.putln('}')
# convert arg values to their final type and assign them
for i, arg in enumerate(all_args):
if arg.default and not arg.type.is_pyobject:
code.putln("if (values[%d]) {" % i)
self.generate_arg_assignment(arg, "values[%d]" % i, code)
if arg.default and not arg.type.is_pyobject:
code.putln('} else {')
code.putln(
"%s = %s;" % (
arg.entry.cname,
arg.calculate_default_value_code(code)))
if arg.entry.type.is_memoryviewslice:
code.put_incref_memoryviewslice(arg.entry.cname,
have_gil=True)
code.putln('}')
code.putln('}')
if code.label_used(argtuple_error_label):
code.put_goto(success_label)
code.put_label(argtuple_error_label)
code.globalstate.use_utility_code(raise_argtuple_invalid_utility_code)
code.put('__Pyx_RaiseArgtupleInvalid("%s", %d, %d, %d, PyTuple_GET_SIZE(%s)); ' % (
self.name, has_fixed_positional_count,
min_positional_args, max_positional_args,
Naming.args_cname))
code.putln(code.error_goto(self.pos))
def generate_arg_default_assignments(self, code):
for arg in self.args:
if arg.is_generic and arg.default:
code.putln(
"%s = %s;" % (
arg.entry.cname,
arg.calculate_default_value_code(code)))
if arg.type.is_memoryviewslice:
code.put_incref_memoryviewslice(arg.entry.cname,
have_gil=True)
def generate_stararg_init_code(self, max_positional_args, code):
if self.starstar_arg:
self.starstar_arg.entry.xdecref_cleanup = 0
code.putln('%s = PyDict_New(); if (unlikely(!%s)) return %s;' % (
self.starstar_arg.entry.cname,
self.starstar_arg.entry.cname,
self.error_value()))
code.put_gotref(self.starstar_arg.entry.cname)
if self.star_arg:
self.star_arg.entry.xdecref_cleanup = 0
code.putln('if (PyTuple_GET_SIZE(%s) > %d) {' % (
Naming.args_cname,
max_positional_args))
code.putln('%s = PyTuple_GetSlice(%s, %d, PyTuple_GET_SIZE(%s));' % (
self.star_arg.entry.cname, Naming.args_cname,
max_positional_args, Naming.args_cname))
code.putln("if (unlikely(!%s)) {" % self.star_arg.entry.cname)
if self.starstar_arg:
code.put_decref_clear(self.starstar_arg.entry.cname, py_object_type)
if self.needs_closure:
code.put_decref(Naming.cur_scope_cname, self.local_scope.scope_class.type)
code.put_finish_refcount_context()
code.putln('return %s;' % self.error_value())
code.putln('}')
code.put_gotref(self.star_arg.entry.cname)
code.putln('} else {')
code.put("%s = %s; " % (self.star_arg.entry.cname, Naming.empty_tuple))
code.put_incref(Naming.empty_tuple, py_object_type)
code.putln('}')
def generate_argument_values_setup_code(self, args, max_positional_args, argtuple_error_label, code):
max_args = len(args)
# the 'values' array collects borrowed references to arguments
# before doing any type coercion etc.
code.putln("PyObject* values[%d] = {%s};" % (
max_args, ','.join('0'*max_args)))
# assign borrowed Python default values to the values array,
# so that they can be overwritten by received arguments below
for i, arg in enumerate(args):
if arg.default and arg.type.is_pyobject:
default_value = arg.calculate_default_value_code(code)
code.putln('values[%d] = %s;' % (i, arg.type.as_pyobject(default_value)))
def generate_keyword_unpacking_code(self, min_positional_args, max_positional_args,
has_fixed_positional_count, has_kw_only_args,
all_args, argtuple_error_label, code):
code.putln('Py_ssize_t kw_args;')
code.putln('const Py_ssize_t pos_args = PyTuple_GET_SIZE(%s);' % Naming.args_cname)
# copy the values from the args tuple and check that it's not too long
code.putln('switch (pos_args) {')
if self.star_arg:
code.putln('default:')
for i in range(max_positional_args-1, -1, -1):
code.put('case %2d: ' % (i+1))
code.putln("values[%d] = PyTuple_GET_ITEM(%s, %d);" % (
i, Naming.args_cname, i))
code.putln('case 0: break;')
if not self.star_arg:
code.put('default: ') # more arguments than allowed
code.put_goto(argtuple_error_label)
code.putln('}')
# The code above is very often (but not always) the same as
# the optimised non-kwargs tuple unpacking code, so we keep
# the code block above at the very top, before the following
# 'external' PyDict_Size() call, to make it easy for the C
# compiler to merge the two separate tuple unpacking
# implementations into one when they turn out to be identical.
# If we received kwargs, fill up the positional/required
# arguments with values from the kw dict
code.putln('kw_args = PyDict_Size(%s);' % Naming.kwds_cname)
if self.num_required_args or max_positional_args > 0:
last_required_arg = -1
for i, arg in enumerate(all_args):
if not arg.default:
last_required_arg = i
if last_required_arg < max_positional_args:
last_required_arg = max_positional_args-1
if max_positional_args > 0:
code.putln('switch (pos_args) {')
for i, arg in enumerate(all_args[:last_required_arg+1]):
if max_positional_args > 0 and i <= max_positional_args:
if self.star_arg and i == max_positional_args:
code.putln('default:')
else:
code.putln('case %2d:' % i)
pystring_cname = code.intern_identifier(arg.name)
if arg.default:
if arg.kw_only:
# handled separately below
continue
code.putln('if (kw_args > 0) {')
code.putln('PyObject* value = PyDict_GetItem(%s, %s);' % (
Naming.kwds_cname, pystring_cname))
code.putln('if (value) { values[%d] = value; kw_args--; }' % i)
code.putln('}')
else:
code.putln('values[%d] = PyDict_GetItem(%s, %s);' % (
i, Naming.kwds_cname, pystring_cname))
code.putln('if (likely(values[%d])) kw_args--;' % i);
if i < min_positional_args:
if i == 0:
# special case: we know arg 0 is missing
code.put('else ')
code.put_goto(argtuple_error_label)
else:
# print the correct number of values (args or
# kwargs) that were passed into positional
# arguments up to this point
code.putln('else {')
code.globalstate.use_utility_code(raise_argtuple_invalid_utility_code)
code.put('__Pyx_RaiseArgtupleInvalid("%s", %d, %d, %d, %d); ' % (
self.name, has_fixed_positional_count,
min_positional_args, max_positional_args, i))
code.putln(code.error_goto(self.pos))
code.putln('}')
elif arg.kw_only:
code.putln('else {')
code.put('__Pyx_RaiseKeywordRequired("%s", %s); ' %(
self.name, pystring_cname))
code.putln(code.error_goto(self.pos))
code.putln('}')
if max_positional_args > 0:
code.putln('}')
if has_kw_only_args and not self.starstar_arg:
# unpack optional keyword-only arguments
# checking for interned strings in a dict is faster than iterating
# but it's too likely that we must iterate if we expect **kwargs
optional_args = []
for i, arg in enumerate(all_args[max_positional_args:]):
if not arg.kw_only or not arg.default:
continue
optional_args.append((i+max_positional_args, arg))
if optional_args:
# this mimics an unrolled loop so that we can "break" out of it
code.putln('while (kw_args > 0) {')
code.putln('PyObject* value;')
for i, arg in optional_args:
pystring_cname = code.intern_identifier(arg.name)
code.putln(
'value = PyDict_GetItem(%s, %s);' % (
Naming.kwds_cname, pystring_cname))
code.putln(
'if (value) { values[%d] = value; if (!(--kw_args)) break; }' % i)
code.putln('break;')
code.putln('}')
code.putln('if (unlikely(kw_args > 0)) {')
# non-positional/-required kw args left in dict: default args,
# kw-only args, **kwargs or error
#
# This is sort of a catch-all: except for checking required
# arguments, this will always do the right thing for unpacking
# keyword arguments, so that we can concentrate on optimising
# common cases above.
if max_positional_args == 0:
pos_arg_count = "0"
elif self.star_arg:
code.putln("const Py_ssize_t used_pos_args = (pos_args < %d) ? pos_args : %d;" % (
max_positional_args, max_positional_args))
pos_arg_count = "used_pos_args"
else:
pos_arg_count = "pos_args"
code.globalstate.use_utility_code(parse_keywords_utility_code)
code.putln(
'if (unlikely(__Pyx_ParseOptionalKeywords(%s, %s, %s, values, %s, "%s") < 0)) %s' % (
Naming.kwds_cname,
Naming.pykwdlist_cname,
self.starstar_arg and self.starstar_arg.entry.cname or '0',
pos_arg_count,
self.name,
code.error_goto(self.pos)))
code.putln('}')
# convert arg values to their final type and assign them
for i, arg in enumerate(all_args):
if arg.default and not arg.type.is_pyobject:
code.putln("if (values[%d]) {" % i)
self.generate_arg_assignment(arg, "values[%d]" % i, code, incref_closure=False)
if arg.default and not arg.type.is_pyobject:
code.putln('} else {')
code.putln(
"%s = %s;" % (
arg.entry.cname,
arg.calculate_default_value_code(code)))
if arg.type.is_memoryviewslice:
code.put_incref_memoryviewslice(arg.entry.cname,
have_gil=True)
code.putln('}')
def generate_argument_conversion_code(self, code):
# Generate code to convert arguments from signature type to
# declared type, if needed. Also copies signature arguments
# into closure fields.
for arg in self.args:
if arg.needs_conversion:
self.generate_arg_conversion(arg, code)
elif not arg.is_self_arg and arg.entry.in_closure:
if arg.type.is_pyobject:
code.put_incref(arg.hdr_cname, py_object_type)
code.putln('%s = %s;' % (arg.entry.cname, arg.hdr_cname))
def generate_arg_conversion(self, arg, code):
# Generate conversion code for one argument.
old_type = arg.hdr_type
new_type = arg.type
if old_type.is_pyobject:
if arg.default:
code.putln("if (%s) {" % arg.hdr_cname)
else:
code.putln("assert(%s); {" % arg.hdr_cname)
self.generate_arg_conversion_from_pyobject(arg, code)
code.putln("}")
elif new_type.is_pyobject:
self.generate_arg_conversion_to_pyobject(arg, code)
else:
if new_type.assignable_from(old_type):
code.putln(
"%s = %s;" % (arg.entry.cname, arg.hdr_cname))
else:
error(arg.pos,
"Cannot convert 1 argument from '%s' to '%s'" %
(old_type, new_type))
def generate_arg_conversion_from_pyobject(self, arg, code):
new_type = arg.type
func = new_type.from_py_function
# copied from CoerceFromPyTypeNode
if func:
lhs = arg.entry.cname
rhs = "%s(%s)" % (func, arg.hdr_cname)
if new_type.is_enum:
rhs = PyrexTypes.typecast(new_type, PyrexTypes.c_long_type, rhs)
code.putln("%s = %s; %s" % (
lhs,
rhs,
code.error_goto_if(new_type.error_condition(arg.entry.cname), arg.pos)))
else:
error(arg.pos,
"Cannot convert Python object argument to type '%s'"
% new_type)
def generate_arg_conversion_to_pyobject(self, arg, code):
old_type = arg.hdr_type
func = old_type.to_py_function
if func:
code.putln("%s = %s(%s); %s" % (
arg.entry.cname,
func,
arg.hdr_cname,
code.error_goto_if_null(arg.entry.cname, arg.pos)))
code.put_var_gotref(arg.entry)
else:
error(arg.pos,
"Cannot convert argument of type '%s' to Python object"
% old_type)
def generate_argument_type_tests(self, code):
# Generate type tests for args whose signature
# type is PyObject * and whose declared type is
# a subtype thereof.
for arg in self.args:
if arg.needs_type_test:
self.generate_arg_type_test(arg, code)
elif not arg.accept_none and arg.type.is_pyobject:
self.generate_arg_none_check(arg, code)
def error_value(self):
return self.entry.signature.error_value
def caller_will_check_exceptions(self):
return 1
class GeneratorDefNode(DefNode):
# Generator DefNode.
#
# gbody GeneratorBodyDefNode
#
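    #
    # The function generated for this node only initialises the generator
    # closure (resume label and body function pointer) and returns it as
    # the generator object; the code that runs between yields lives in
    # gbody.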
is_generator = True
needs_closure = True
child_attrs = DefNode.child_attrs + ["gbody"]
def __init__(self, **kwargs):
        # XXX: doesn't actually need a body
kwargs['body'] = StatListNode(kwargs['pos'], stats=[])
super(GeneratorDefNode, self).__init__(**kwargs)
def analyse_declarations(self, env):
super(GeneratorDefNode, self).analyse_declarations(env)
self.gbody.local_scope = self.local_scope
self.gbody.analyse_declarations(env)
def generate_function_body(self, env, code):
body_cname = self.gbody.entry.func_cname
generator_cname = '%s->%s' % (Naming.cur_scope_cname, Naming.obj_base_cname)
code.putln('%s.resume_label = 0;' % generator_cname)
code.putln('%s.body = (__pyx_generator_body_t) %s;' % (generator_cname, body_cname))
code.put_giveref(Naming.cur_scope_cname)
code.put_finish_refcount_context()
code.putln("return (PyObject *) %s;" % Naming.cur_scope_cname);
def generate_function_definitions(self, env, code):
from ExprNodes import generator_utility_code
env.use_utility_code(generator_utility_code)
self.gbody.generate_function_header(code, proto=True)
super(GeneratorDefNode, self).generate_function_definitions(env, code)
self.gbody.generate_function_definitions(env, code)
class GeneratorBodyDefNode(DefNode):
# Generator body DefNode.
#
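    # The generated C function receives the generator closure and the
    # value sent into the generator, and a switch on resume_label at the
    # top jumps back to the yield point where execution last stopped
    # (see generate_function_definitions below).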
is_generator_body = True
def __init__(self, pos=None, name=None, body=None):
super(GeneratorBodyDefNode, self).__init__(pos=pos, body=body, name=name, doc=None,
args=[],
star_arg=None, starstar_arg=None)
def declare_generator_body(self, env):
prefix = env.next_id(env.scope_prefix)
name = env.next_id('generator')
cname = Naming.genbody_prefix + prefix + name
entry = env.declare_var(None, py_object_type, self.pos,
cname=cname, visibility='private')
entry.func_cname = cname
entry.qualified_name = EncodedString(self.name)
self.entry = entry
def analyse_declarations(self, env):
self.analyse_argument_types(env)
self.declare_generator_body(env)
def generate_function_header(self, code, proto=False):
header = "static PyObject *%s(%s, PyObject *%s)" % (
self.entry.func_cname,
self.local_scope.scope_class.type.declaration_code(Naming.cur_scope_cname),
Naming.sent_value_cname)
if proto:
code.putln('%s; /* proto */' % header)
else:
code.putln('%s /* generator body */\n{' % header);
def generate_function_definitions(self, env, code):
lenv = self.local_scope
# Generate closure function definitions
self.body.generate_function_definitions(lenv, code)
# Generate C code for header and body of function
code.enter_cfunc_scope()
code.return_from_error_cleanup_label = code.new_label()
# ----- Top-level constants used by this function
code.mark_pos(self.pos)
self.generate_cached_builtins_decls(lenv, code)
# ----- Function header
code.putln("")
self.generate_function_header(code)
# ----- Local variables
code.putln("PyObject *%s = NULL;" % Naming.retval_cname)
tempvardecl_code = code.insertion_point()
code.put_declare_refcount_context()
code.put_setup_refcount_context(self.entry.name)
# ----- Resume switch point.
code.funcstate.init_closure_temps(lenv.scope_class.type.scope)
resume_code = code.insertion_point()
first_run_label = code.new_label('first_run')
code.use_label(first_run_label)
code.put_label(first_run_label)
code.putln('%s' %
(code.error_goto_if_null(Naming.sent_value_cname, self.pos)))
# ----- Function body
self.generate_function_body(env, code)
code.putln('PyErr_SetNone(PyExc_StopIteration); %s' % code.error_goto(self.pos))
# ----- Error cleanup
if code.error_label in code.labels_used:
code.put_goto(code.return_label)
code.put_label(code.error_label)
for cname, type in code.funcstate.all_managed_temps():
code.put_xdecref(cname, type)
code.put_add_traceback(self.entry.qualified_name)
# ----- Non-error return cleanup
code.put_label(code.return_label)
code.put_xdecref(Naming.retval_cname, py_object_type)
code.putln('%s->%s.resume_label = -1;' % (Naming.cur_scope_cname, Naming.obj_base_cname))
code.put_finish_refcount_context()
code.putln('return NULL;');
code.putln("}")
# ----- Go back and insert temp variable declarations
tempvardecl_code.put_temp_declarations(code.funcstate)
# ----- Generator resume code
resume_code.putln("switch (%s->%s.resume_label) {" % (Naming.cur_scope_cname, Naming.obj_base_cname));
resume_code.putln("case 0: goto %s;" % first_run_label)
from ParseTreeTransforms import YieldNodeCollector
collector = YieldNodeCollector()
collector.visitchildren(self)
for yield_expr in collector.yields:
resume_code.putln("case %d: goto %s;" % (yield_expr.label_num, yield_expr.label_name));
resume_code.putln("default: /* CPython raises the right error here */");
resume_code.put_finish_refcount_context()
resume_code.putln("return NULL;");
resume_code.putln("}");
code.exit_cfunc_scope()
class OverrideCheckNode(StatNode):
# A Node for dispatching to the def method if it
    # is overridden.
#
# py_func
#
# args
# func_temp
# body
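    #
    # Prepended to the body of a cpdef function (see CFuncDefNode above):
    # at runtime it looks the method up on the instance (or module) and,
    # if a Python override is found, forwards the call to it instead of
    # running the C implementation.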
child_attrs = ['body']
body = None
def analyse_expressions(self, env):
self.args = env.arg_entries
if self.py_func.is_module_scope:
first_arg = 0
else:
first_arg = 1
import ExprNodes
self.func_node = ExprNodes.RawCNameExprNode(self.pos, py_object_type)
call_tuple = ExprNodes.TupleNode(self.pos, args=[ExprNodes.NameNode(self.pos, name=arg.name) for arg in self.args[first_arg:]])
call_node = ExprNodes.SimpleCallNode(self.pos,
function=self.func_node,
args=[ExprNodes.NameNode(self.pos, name=arg.name) for arg in self.args[first_arg:]])
self.body = ReturnStatNode(self.pos, value=call_node)
self.body.analyse_expressions(env)
def generate_execution_code(self, code):
interned_attr_cname = code.intern_identifier(self.py_func.entry.name)
        # Look the attribute up on the module object for module-level functions, on the instance otherwise
if self.py_func.is_module_scope:
self_arg = "((PyObject *)%s)" % Naming.module_cname
else:
self_arg = "((PyObject *)%s)" % self.args[0].cname
code.putln("/* Check if called by wrapper */")
code.putln("if (unlikely(%s)) ;" % Naming.skip_dispatch_cname)
code.putln("/* Check if overriden in Python */")
if self.py_func.is_module_scope:
code.putln("else {")
else:
code.putln("else if (unlikely(Py_TYPE(%s)->tp_dictoffset != 0)) {" % self_arg)
func_node_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
self.func_node.set_cname(func_node_temp)
        # need to get the attribute manually; the scope lookup would return the cdef method
err = code.error_goto_if_null(func_node_temp, self.pos)
code.putln("%s = PyObject_GetAttr(%s, %s); %s" % (
func_node_temp, self_arg, interned_attr_cname, err))
code.put_gotref(func_node_temp)
is_builtin_function_or_method = "PyCFunction_Check(%s)" % func_node_temp
is_overridden = "(PyCFunction_GET_FUNCTION(%s) != (void *)&%s)" % (
func_node_temp, self.py_func.entry.func_cname)
code.putln("if (!%s || %s) {" % (is_builtin_function_or_method, is_overridden))
self.body.generate_execution_code(code)
code.putln("}")
code.put_decref_clear(func_node_temp, PyrexTypes.py_object_type)
code.funcstate.release_temp(func_node_temp)
code.putln("}")
class ClassDefNode(StatNode, BlockNode):
pass
class PyClassDefNode(ClassDefNode):
# A Python class definition.
#
# name EncodedString Name of the class
# doc string or None
# body StatNode Attribute definition code
# entry Symtab.Entry
# scope PyClassScope
# decorators [DecoratorNode] list of decorators or None
#
# The following subnodes are constructed internally:
#
# dict DictNode Class dictionary or Py3 namespace
# classobj ClassNode Class object
# target NameNode Variable to assign class object to
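    #
    # For a Python 3 style class such as "class C(Base, metaclass=M):",
    # py3_style_class is set and the bases/metaclass/mkw subnodes are
    # built in __init__ below; classic classes only construct dict,
    # classobj and target.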
child_attrs = ["body", "dict", "metaclass", "mkw", "bases", "class_result", "target"]
decorators = None
class_result = None
py3_style_class = False # Python3 style class (bases+kwargs)
def __init__(self, pos, name, bases, doc, body, decorators = None,
keyword_args = None, starstar_arg = None):
StatNode.__init__(self, pos)
self.name = name
self.doc = doc
self.body = body
self.decorators = decorators
import ExprNodes
if self.doc and Options.docstrings:
doc = embed_position(self.pos, self.doc)
doc_node = ExprNodes.StringNode(pos, value = doc)
else:
doc_node = None
if keyword_args or starstar_arg:
self.py3_style_class = True
self.bases = bases
self.metaclass = None
if keyword_args and not starstar_arg:
for i, item in list(enumerate(keyword_args.key_value_pairs))[::-1]:
if item.key.value == 'metaclass':
if self.metaclass is not None:
error(item.pos, "keyword argument 'metaclass' passed multiple times")
# special case: we already know the metaclass,
# so we don't need to do the "build kwargs,
# find metaclass" dance at runtime
self.metaclass = item.value
del keyword_args.key_value_pairs[i]
if starstar_arg:
self.mkw = ExprNodes.KeywordArgsNode(
pos, keyword_args = keyword_args and keyword_args.key_value_pairs or [],
starstar_arg = starstar_arg)
elif keyword_args and keyword_args.key_value_pairs:
self.mkw = keyword_args
else:
self.mkw = ExprNodes.NullNode(pos)
if self.metaclass is None:
self.metaclass = ExprNodes.PyClassMetaclassNode(
pos, mkw = self.mkw, bases = self.bases)
self.dict = ExprNodes.PyClassNamespaceNode(pos, name = name,
doc = doc_node, metaclass = self.metaclass, bases = self.bases,
mkw = self.mkw)
self.classobj = ExprNodes.Py3ClassNode(pos, name = name,
bases = self.bases, dict = self.dict, doc = doc_node,
metaclass = self.metaclass, mkw = self.mkw)
else:
self.dict = ExprNodes.DictNode(pos, key_value_pairs = [])
self.metaclass = None
self.mkw = None
self.bases = None
self.classobj = ExprNodes.ClassNode(pos, name = name,
bases = bases, dict = self.dict, doc = doc_node)
self.target = ExprNodes.NameNode(pos, name = name)
def as_cclass(self):
"""
Return this node as if it were declared as an extension class
"""
if self.py3_style_class:
error(self.classobj.pos, "Python3 style class could not be represented as C class")
return
bases = self.classobj.bases.args
if len(bases) == 0:
base_class_name = None
base_class_module = None
elif len(bases) == 1:
base = bases[0]
path = []
from ExprNodes import AttributeNode, NameNode
while isinstance(base, AttributeNode):
path.insert(0, base.attribute)
base = base.obj
if isinstance(base, NameNode):
path.insert(0, base.name)
base_class_name = path[-1]
if len(path) > 1:
base_class_module = u'.'.join(path[:-1])
else:
base_class_module = None
else:
error(self.classobj.bases.args.pos, "Invalid base class")
else:
error(self.classobj.bases.args.pos, "C class may only have one base class")
return None
return CClassDefNode(self.pos,
visibility = 'private',
module_name = None,
class_name = self.name,
base_class_module = base_class_module,
base_class_name = base_class_name,
decorators = self.decorators,
body = self.body,
in_pxd = False,
doc = self.doc)
def create_scope(self, env):
genv = env
while genv.is_py_class_scope or genv.is_c_class_scope:
genv = genv.outer_scope
cenv = self.scope = PyClassScope(name = self.name, outer_scope = genv)
return cenv
def analyse_declarations(self, env):
class_result = self.classobj
if self.decorators:
from ExprNodes import SimpleCallNode
for decorator in self.decorators[::-1]:
class_result = SimpleCallNode(
decorator.pos,
function = decorator.decorator,
args = [class_result])
self.class_result = class_result
self.class_result.analyse_declarations(env)
self.target.analyse_target_declaration(env)
cenv = self.create_scope(env)
cenv.directives = env.directives
cenv.class_obj_cname = self.target.entry.cname
self.body.analyse_declarations(cenv)
def analyse_expressions(self, env):
if self.py3_style_class:
self.bases.analyse_expressions(env)
self.metaclass.analyse_expressions(env)
self.mkw.analyse_expressions(env)
self.dict.analyse_expressions(env)
self.class_result.analyse_expressions(env)
genv = env.global_scope()
cenv = self.scope
self.body.analyse_expressions(cenv)
self.target.analyse_target_expression(env, self.classobj)
def generate_function_definitions(self, env, code):
self.generate_lambda_definitions(self.scope, code)
self.body.generate_function_definitions(self.scope, code)
def generate_execution_code(self, code):
code.pyclass_stack.append(self)
cenv = self.scope
if self.py3_style_class:
self.bases.generate_evaluation_code(code)
self.mkw.generate_evaluation_code(code)
self.metaclass.generate_evaluation_code(code)
self.dict.generate_evaluation_code(code)
cenv.namespace_cname = cenv.class_obj_cname = self.dict.result()
self.body.generate_execution_code(code)
self.class_result.generate_evaluation_code(code)
cenv.namespace_cname = cenv.class_obj_cname = self.classobj.result()
self.target.generate_assignment_code(self.class_result, code)
self.dict.generate_disposal_code(code)
self.dict.free_temps(code)
if self.py3_style_class:
self.mkw.generate_disposal_code(code)
self.mkw.free_temps(code)
self.metaclass.generate_disposal_code(code)
self.metaclass.free_temps(code)
self.bases.generate_disposal_code(code)
self.bases.free_temps(code)
code.pyclass_stack.pop()
class CClassDefNode(ClassDefNode):
# An extension type definition.
#
# visibility 'private' or 'public' or 'extern'
# typedef_flag boolean
# api boolean
# module_name string or None For import of extern type objects
# class_name string Unqualified name of class
# as_name string or None Name to declare as in this scope
# base_class_module string or None Module containing the base class
# base_class_name string or None Name of the base class
# objstruct_name string or None Specified C name of object struct
# typeobj_name string or None Specified C name of type object
# in_pxd boolean Is in a .pxd file
# decorators [DecoratorNode] list of decorators or None
# doc string or None
# body StatNode or None
# entry Symtab.Entry
# base_type PyExtensionType or None
# buffer_defaults_node DictNode or None Declares defaults for a buffer
# buffer_defaults_pos
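    #
    # Corresponds to a "cdef class" statement, e.g.
    # "cdef class Spam(Base):"; extern variants typically come from
    # "cdef extern from" blocks and only declare the type.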
child_attrs = ["body"]
buffer_defaults_node = None
buffer_defaults_pos = None
typedef_flag = False
api = False
objstruct_name = None
typeobj_name = None
decorators = None
shadow = False
def buffer_defaults(self, env):
if not hasattr(self, '_buffer_defaults'):
import Buffer
if self.buffer_defaults_node:
self._buffer_defaults = Buffer.analyse_buffer_options(
self.buffer_defaults_pos,
env, [], self.buffer_defaults_node,
need_complete=False)
else:
self._buffer_defaults = None
return self._buffer_defaults
def declare(self, env):
if self.module_name and self.visibility != 'extern':
module_path = self.module_name.split(".")
home_scope = env.find_imported_module(module_path, self.pos)
if not home_scope:
return None
else:
home_scope = env
self.entry = home_scope.declare_c_class(
name = self.class_name,
pos = self.pos,
defining = 0,
implementing = 0,
module_name = self.module_name,
base_type = None,
objstruct_cname = self.objstruct_name,
typeobj_cname = self.typeobj_name,
visibility = self.visibility,
typedef_flag = self.typedef_flag,
api = self.api,
buffer_defaults = self.buffer_defaults(env),
shadow = self.shadow)
def analyse_declarations(self, env):
#print "CClassDefNode.analyse_declarations:", self.class_name
#print "...visibility =", self.visibility
#print "...module_name =", self.module_name
if env.in_cinclude and not self.objstruct_name:
error(self.pos, "Object struct name specification required for "
"C class defined in 'extern from' block")
if self.decorators:
error(self.pos,
"Decorators not allowed on cdef classes (used on type '%s')" % self.class_name)
self.base_type = None
# Now that module imports are cached, we need to
# import the modules for extern classes.
if self.module_name:
self.module = None
for module in env.cimported_modules:
if module.name == self.module_name:
self.module = module
if self.module is None:
self.module = ModuleScope(self.module_name, None, env.context)
self.module.has_extern_class = 1
env.add_imported_module(self.module)
if self.base_class_name:
if self.base_class_module:
base_class_scope = env.find_module(self.base_class_module, self.pos)
else:
base_class_scope = env
if self.base_class_name == 'object':
# extension classes are special and don't need to inherit from object
if base_class_scope is None or base_class_scope.lookup('object') is None:
self.base_class_name = None
self.base_class_module = None
base_class_scope = None
if base_class_scope:
base_class_entry = base_class_scope.find(self.base_class_name, self.pos)
if base_class_entry:
if not base_class_entry.is_type:
error(self.pos, "'%s' is not a type name" % self.base_class_name)
elif not base_class_entry.type.is_extension_type and \
not (base_class_entry.type.is_builtin_type and \
base_class_entry.type.objstruct_cname):
error(self.pos, "'%s' is not an extension type" % self.base_class_name)
elif not base_class_entry.type.is_complete():
error(self.pos, "Base class '%s' of type '%s' is incomplete" % (
self.base_class_name, self.class_name))
elif base_class_entry.type.scope and base_class_entry.type.scope.directives and \
base_class_entry.type.is_final_type:
error(self.pos, "Base class '%s' of type '%s' is final" % (
self.base_class_name, self.class_name))
elif base_class_entry.type.is_builtin_type and \
base_class_entry.type.name in ('tuple', 'str', 'bytes'):
error(self.pos, "inheritance from PyVarObject types like '%s' is not currently supported"
% base_class_entry.type.name)
else:
self.base_type = base_class_entry.type
has_body = self.body is not None
if self.module_name and self.visibility != 'extern':
module_path = self.module_name.split(".")
home_scope = env.find_imported_module(module_path, self.pos)
if not home_scope:
return
else:
home_scope = env
if self.visibility == 'extern':
if (self.module_name == '__builtin__' and
self.class_name in Builtin.builtin_types and
env.qualified_name[:8] != 'cpython.'): # allow overloaded names for cimporting from cpython
warning(self.pos, "%s already a builtin Cython type" % self.class_name, 1)
self.entry = home_scope.declare_c_class(
name = self.class_name,
pos = self.pos,
defining = has_body and self.in_pxd,
implementing = has_body and not self.in_pxd,
module_name = self.module_name,
base_type = self.base_type,
objstruct_cname = self.objstruct_name,
typeobj_cname = self.typeobj_name,
visibility = self.visibility,
typedef_flag = self.typedef_flag,
api = self.api,
buffer_defaults = self.buffer_defaults(env),
shadow = self.shadow)
if self.shadow:
home_scope.lookup(self.class_name).as_variable = self.entry
if home_scope is not env and self.visibility == 'extern':
env.add_imported_entry(self.class_name, self.entry, self.pos)
self.scope = scope = self.entry.type.scope
if scope is not None:
scope.directives = env.directives
if self.doc and Options.docstrings:
scope.doc = embed_position(self.pos, self.doc)
if has_body:
self.body.analyse_declarations(scope)
if self.in_pxd:
scope.defined = 1
else:
scope.implemented = 1
env.allocate_vtable_names(self.entry)
def analyse_expressions(self, env):
if self.body:
scope = self.entry.type.scope
self.body.analyse_expressions(scope)
def generate_function_definitions(self, env, code):
if self.body:
self.generate_lambda_definitions(self.scope, code)
self.body.generate_function_definitions(self.scope, code)
def generate_execution_code(self, code):
# This is needed to generate evaluation code for
# default values of method arguments.
if self.body:
self.body.generate_execution_code(code)
def annotate(self, code):
if self.body:
self.body.annotate(code)
class PropertyNode(StatNode):
# Definition of a property in an extension type.
#
# name string
# doc EncodedString or None Doc string
# body StatListNode
child_attrs = ["body"]
def analyse_declarations(self, env):
entry = env.declare_property(self.name, self.doc, self.pos)
if entry:
entry.scope.directives = env.directives
self.body.analyse_declarations(entry.scope)
def analyse_expressions(self, env):
self.body.analyse_expressions(env)
def generate_function_definitions(self, env, code):
self.body.generate_function_definitions(env, code)
def generate_execution_code(self, code):
pass
def annotate(self, code):
self.body.annotate(code)
class GlobalNode(StatNode):
# Global variable declaration.
#
# names [string]
child_attrs = []
def analyse_declarations(self, env):
for name in self.names:
env.declare_global(name, self.pos)
    def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
class NonlocalNode(StatNode):
# Nonlocal variable declaration via the 'nonlocal' keyword.
#
# names [string]
child_attrs = []
def analyse_declarations(self, env):
for name in self.names:
env.declare_nonlocal(name, self.pos)
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
class ExprStatNode(StatNode):
# Expression used as a statement.
#
# expr ExprNode
child_attrs = ["expr"]
def analyse_declarations(self, env):
import ExprNodes
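        # A call like "cython.declare(x=int)" used as a bare statement declares
        # the named variables and then degrades this node into a PassStatNode.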
if isinstance(self.expr, ExprNodes.GeneralCallNode):
func = self.expr.function.as_cython_attribute()
if func == u'declare':
args, kwds = self.expr.explicit_args_kwds()
if len(args):
error(self.expr.pos, "Variable names must be specified.")
for var, type_node in kwds.key_value_pairs:
type = type_node.analyse_as_type(env)
if type is None:
error(type_node.pos, "Unknown type")
else:
env.declare_var(var.value, type, var.pos, is_cdef = True)
self.__class__ = PassStatNode
def analyse_expressions(self, env):
self.expr.result_is_used = False # hint that .result() may safely be left empty
self.expr.analyse_expressions(env)
def nogil_check(self, env):
if self.expr.type.is_pyobject and self.expr.is_temp:
self.gil_error()
gil_message = "Discarding owned Python object"
def generate_execution_code(self, code):
self.expr.generate_evaluation_code(code)
if not self.expr.is_temp and self.expr.result():
code.putln("%s;" % self.expr.result())
self.expr.generate_disposal_code(code)
self.expr.free_temps(code)
def generate_function_definitions(self, env, code):
self.expr.generate_function_definitions(env, code)
def annotate(self, code):
self.expr.annotate(code)
class AssignmentNode(StatNode):
# Abstract base class for assignment nodes.
#
# The analyse_expressions and generate_execution_code
# phases of assignments are split into two sub-phases
# each, to enable all the right hand sides of a
# parallel assignment to be evaluated before assigning
# to any of the left hand sides.
def analyse_expressions(self, env):
self.analyse_types(env)
# def analyse_expressions(self, env):
# self.analyse_expressions_1(env)
# self.analyse_expressions_2(env)
def generate_execution_code(self, code):
self.generate_rhs_evaluation_code(code)
self.generate_assignment_code(code)
class SingleAssignmentNode(AssignmentNode):
# The simplest case:
#
# a = b
#
# lhs ExprNode Left hand side
# rhs ExprNode Right hand side
# first bool Is this guaranteed the first assignment to lhs?
child_attrs = ["lhs", "rhs"]
first = False
declaration_only = False
def analyse_declarations(self, env):
import ExprNodes
# handle declarations of the form x = cython.foo()
if isinstance(self.rhs, ExprNodes.CallNode):
func_name = self.rhs.function.as_cython_attribute()
if func_name:
args, kwds = self.rhs.explicit_args_kwds()
if func_name in ['declare', 'typedef']:
if len(args) > 2 or kwds is not None:
error(self.rhs.pos, "Can only declare one type at a time.")
return
type = args[0].analyse_as_type(env)
if type is None:
error(args[0].pos, "Unknown type")
return
lhs = self.lhs
if func_name == 'declare':
if isinstance(lhs, ExprNodes.NameNode):
vars = [(lhs.name, lhs.pos)]
elif isinstance(lhs, ExprNodes.TupleNode):
vars = [(var.name, var.pos) for var in lhs.args]
else:
error(lhs.pos, "Invalid declaration")
return
for var, pos in vars:
env.declare_var(var, type, pos, is_cdef = True)
if len(args) == 2:
# we have a value
self.rhs = args[1]
else:
self.declaration_only = True
else:
self.declaration_only = True
if not isinstance(lhs, ExprNodes.NameNode):
error(lhs.pos, "Invalid declaration.")
env.declare_typedef(lhs.name, type, self.pos, visibility='private')
elif func_name in ['struct', 'union']:
self.declaration_only = True
if len(args) > 0 or kwds is None:
error(self.rhs.pos, "Struct or union members must be given by name.")
return
members = []
for member, type_node in kwds.key_value_pairs:
type = type_node.analyse_as_type(env)
if type is None:
error(type_node.pos, "Unknown type")
else:
members.append((member.value, type, member.pos))
if len(members) < len(kwds.key_value_pairs):
return
if not isinstance(self.lhs, ExprNodes.NameNode):
error(self.lhs.pos, "Invalid declaration.")
name = self.lhs.name
scope = StructOrUnionScope(name)
env.declare_struct_or_union(name, func_name, scope, False, self.rhs.pos)
for member, type, pos in members:
scope.declare_var(member, type, pos)
if self.declaration_only:
return
else:
self.lhs.analyse_target_declaration(env)
def analyse_types(self, env, use_temp = 0):
self.rhs.analyse_types(env)
self.lhs.analyse_target_types(env)
self.lhs.gil_assignment_check(env)
self.rhs = self.rhs.coerce_to(self.lhs.type, env)
if use_temp:
self.rhs = self.rhs.coerce_to_temp(env)
def generate_rhs_evaluation_code(self, code):
self.rhs.generate_evaluation_code(code)
def generate_assignment_code(self, code):
self.lhs.generate_assignment_code(self.rhs, code)
def generate_function_definitions(self, env, code):
self.rhs.generate_function_definitions(env, code)
def annotate(self, code):
self.lhs.annotate(code)
self.rhs.annotate(code)
class CascadedAssignmentNode(AssignmentNode):
# An assignment with multiple left hand sides:
#
# a = b = c
#
# lhs_list [ExprNode] Left hand sides
# rhs ExprNode Right hand sides
#
# Used internally:
#
# coerced_rhs_list [ExprNode] RHS coerced to type of each LHS
child_attrs = ["lhs_list", "rhs", "coerced_rhs_list"]
coerced_rhs_list = None
def analyse_declarations(self, env):
for lhs in self.lhs_list:
lhs.analyse_target_declaration(env)
def analyse_types(self, env, use_temp = 0):
self.rhs.analyse_types(env)
if not self.rhs.is_simple():
if use_temp:
self.rhs = self.rhs.coerce_to_temp(env)
else:
self.rhs = self.rhs.coerce_to_simple(env)
from ExprNodes import CloneNode
self.coerced_rhs_list = []
for lhs in self.lhs_list:
lhs.analyse_target_types(env)
lhs.gil_assignment_check(env)
rhs = CloneNode(self.rhs)
rhs = rhs.coerce_to(lhs.type, env)
self.coerced_rhs_list.append(rhs)
def generate_rhs_evaluation_code(self, code):
self.rhs.generate_evaluation_code(code)
def generate_assignment_code(self, code):
for i in range(len(self.lhs_list)):
lhs = self.lhs_list[i]
rhs = self.coerced_rhs_list[i]
rhs.generate_evaluation_code(code)
lhs.generate_assignment_code(rhs, code)
# Assignment has disposed of the cloned RHS
self.rhs.generate_disposal_code(code)
self.rhs.free_temps(code)
def generate_function_definitions(self, env, code):
self.rhs.generate_function_definitions(env, code)
def annotate(self, code):
for i in range(len(self.lhs_list)):
lhs = self.lhs_list[i].annotate(code)
rhs = self.coerced_rhs_list[i].annotate(code)
self.rhs.annotate(code)
class ParallelAssignmentNode(AssignmentNode):
# A combined packing/unpacking assignment:
#
# a, b, c = d, e, f
#
# This has been rearranged by the parser into
#
# a = d ; b = e ; c = f
#
# but we must evaluate all the right hand sides
# before assigning to any of the left hand sides.
#
# stats [AssignmentNode] The constituent assignments
child_attrs = ["stats"]
def analyse_declarations(self, env):
for stat in self.stats:
stat.analyse_declarations(env)
def analyse_expressions(self, env):
for stat in self.stats:
stat.analyse_types(env, use_temp = 1)
# def analyse_expressions(self, env):
# for stat in self.stats:
# stat.analyse_expressions_1(env, use_temp = 1)
# for stat in self.stats:
# stat.analyse_expressions_2(env)
def generate_execution_code(self, code):
for stat in self.stats:
stat.generate_rhs_evaluation_code(code)
for stat in self.stats:
stat.generate_assignment_code(code)
def generate_function_definitions(self, env, code):
for stat in self.stats:
stat.generate_function_definitions(env, code)
def annotate(self, code):
for stat in self.stats:
stat.annotate(code)
class InPlaceAssignmentNode(AssignmentNode):
# An in place arithmetic operand:
#
# a += b
# a -= b
# ...
#
# lhs ExprNode Left hand side
# rhs ExprNode Right hand side
# operator char one of "+-*/%^&|"
#
# This code is a bit tricky because in order to obey Python
# semantics the sub-expressions (e.g. indices) of the lhs must
# not be evaluated twice. So we must re-use the values calculated
# in evaluation phase for the assignment phase as well.
# Fortunately, the type of the lhs node is fairly constrained
# (it must be a NameNode, AttributeNode, or IndexNode).
child_attrs = ["lhs", "rhs"]
def analyse_declarations(self, env):
self.lhs.analyse_target_declaration(env)
def analyse_types(self, env):
self.rhs.analyse_types(env)
self.lhs.analyse_target_types(env)
def generate_execution_code(self, code):
import ExprNodes
self.rhs.generate_evaluation_code(code)
self.lhs.generate_subexpr_evaluation_code(code)
c_op = self.operator
if c_op == "//":
c_op = "/"
elif c_op == "**":
error(self.pos, "No C inplace power operator")
if isinstance(self.lhs, ExprNodes.IndexNode) and self.lhs.is_buffer_access:
if self.lhs.type.is_pyobject:
error(self.pos, "In-place operators not allowed on object buffers in this release.")
if c_op in ('/', '%') and self.lhs.type.is_int and not code.directives['cdivision']:
error(self.pos, "In-place non-c divide operators not allowed on int buffers.")
self.lhs.generate_buffer_setitem_code(self.rhs, code, c_op)
else:
# C++
# TODO: make sure overload is declared
code.putln("%s %s= %s;" % (self.lhs.result(), c_op, self.rhs.result()))
self.lhs.generate_subexpr_disposal_code(code)
self.lhs.free_subexpr_temps(code)
self.rhs.generate_disposal_code(code)
self.rhs.free_temps(code)
def annotate(self, code):
self.lhs.annotate(code)
self.rhs.annotate(code)
def create_binop_node(self):
import ExprNodes
return ExprNodes.binop_node(self.pos, self.operator, self.lhs, self.rhs)
class PrintStatNode(StatNode):
# print statement
#
# arg_tuple TupleNode
# stream ExprNode or None (stdout)
# append_newline boolean
child_attrs = ["arg_tuple", "stream"]
def analyse_expressions(self, env):
if self.stream:
self.stream.analyse_expressions(env)
self.stream = self.stream.coerce_to_pyobject(env)
self.arg_tuple.analyse_expressions(env)
self.arg_tuple = self.arg_tuple.coerce_to_pyobject(env)
env.use_utility_code(printing_utility_code)
if len(self.arg_tuple.args) == 1 and self.append_newline:
env.use_utility_code(printing_one_utility_code)
nogil_check = Node.gil_error
gil_message = "Python print statement"
def generate_execution_code(self, code):
if self.stream:
self.stream.generate_evaluation_code(code)
stream_result = self.stream.py_result()
else:
stream_result = '0'
if len(self.arg_tuple.args) == 1 and self.append_newline:
arg = self.arg_tuple.args[0]
arg.generate_evaluation_code(code)
code.putln(
"if (__Pyx_PrintOne(%s, %s) < 0) %s" % (
stream_result,
arg.py_result(),
code.error_goto(self.pos)))
arg.generate_disposal_code(code)
arg.free_temps(code)
else:
self.arg_tuple.generate_evaluation_code(code)
code.putln(
"if (__Pyx_Print(%s, %s, %d) < 0) %s" % (
stream_result,
self.arg_tuple.py_result(),
self.append_newline,
code.error_goto(self.pos)))
self.arg_tuple.generate_disposal_code(code)
self.arg_tuple.free_temps(code)
if self.stream:
self.stream.generate_disposal_code(code)
self.stream.free_temps(code)
def generate_function_definitions(self, env, code):
if self.stream:
self.stream.generate_function_definitions(env, code)
self.arg_tuple.generate_function_definitions(env, code)
def annotate(self, code):
if self.stream:
self.stream.annotate(code)
self.arg_tuple.annotate(code)
class ExecStatNode(StatNode):
# exec statement
#
# args [ExprNode]
child_attrs = ["args"]
def analyse_expressions(self, env):
for i, arg in enumerate(self.args):
arg.analyse_expressions(env)
arg = arg.coerce_to_pyobject(env)
self.args[i] = arg
env.use_utility_code(Builtin.pyexec_utility_code)
nogil_check = Node.gil_error
gil_message = "Python exec statement"
def generate_execution_code(self, code):
args = []
for arg in self.args:
arg.generate_evaluation_code(code)
args.append( arg.py_result() )
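        # exec takes at most three operands (code, globals, locals); pad the
        # missing ones with NULL ('0') so __Pyx_PyRun always receives three arguments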
args = tuple(args + ['0', '0'][:3-len(args)])
temp_result = code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=True)
code.putln("%s = __Pyx_PyRun(%s, %s, %s);" % (
(temp_result,) + args))
for arg in self.args:
arg.generate_disposal_code(code)
arg.free_temps(code)
code.putln(
code.error_goto_if_null(temp_result, self.pos))
code.put_gotref(temp_result)
code.put_decref_clear(temp_result, py_object_type)
code.funcstate.release_temp(temp_result)
def annotate(self, code):
for arg in self.args:
arg.annotate(code)
class DelStatNode(StatNode):
# del statement
#
# args [ExprNode]
child_attrs = ["args"]
def analyse_declarations(self, env):
for arg in self.args:
arg.analyse_target_declaration(env)
def analyse_expressions(self, env):
for arg in self.args:
arg.analyse_target_expression(env, None)
if arg.type.is_pyobject or (arg.is_name and
arg.type.is_memoryviewslice):
pass
elif arg.type.is_ptr and arg.type.base_type.is_cpp_class:
self.cpp_check(env)
elif arg.type.is_cpp_class:
error(arg.pos, "Deletion of non-heap C++ object")
else:
error(arg.pos, "Deletion of non-Python, non-C++ object")
#arg.release_target_temp(env)
def nogil_check(self, env):
for arg in self.args:
if arg.type.is_pyobject:
self.gil_error()
gil_message = "Deleting Python object"
def generate_execution_code(self, code):
for arg in self.args:
if arg.type.is_pyobject or arg.type.is_memoryviewslice:
arg.generate_deletion_code(code)
elif arg.type.is_ptr and arg.type.base_type.is_cpp_class:
arg.generate_result_code(code)
code.putln("delete %s;" % arg.result())
# else error reported earlier
def annotate(self, code):
for arg in self.args:
arg.annotate(code)
class PassStatNode(StatNode):
# pass statement
child_attrs = []
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
class BreakStatNode(StatNode):
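    # break statement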
child_attrs = []
is_terminator = True
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
if not code.break_label:
error(self.pos, "break statement not inside loop")
else:
code.put_goto(code.break_label)
class ContinueStatNode(StatNode):
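    # continue statement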
child_attrs = []
is_terminator = True
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
if code.funcstate.in_try_finally:
error(self.pos, "continue statement inside try of try...finally")
elif not code.continue_label:
error(self.pos, "continue statement not inside loop")
else:
code.put_goto(code.continue_label)
class ReturnStatNode(StatNode):
# return statement
#
# value ExprNode or None
# return_type PyrexType
child_attrs = ["value"]
is_terminator = True
# Whether we are in a parallel section
in_parallel = False
def analyse_expressions(self, env):
return_type = env.return_type
self.return_type = return_type
if not return_type:
error(self.pos, "Return not inside a function body")
return
if self.value:
self.value.analyse_types(env)
if return_type.is_void or return_type.is_returncode:
error(self.value.pos,
"Return with value in void function")
else:
self.value = self.value.coerce_to(env.return_type, env)
else:
if (not return_type.is_void
and not return_type.is_pyobject
and not return_type.is_returncode):
error(self.pos, "Return value required")
def nogil_check(self, env):
if self.return_type.is_pyobject:
self.gil_error()
gil_message = "Returning Python object"
def generate_execution_code(self, code):
code.mark_pos(self.pos)
if not self.return_type:
# error reported earlier
return
if self.return_type.is_pyobject:
code.put_xdecref(Naming.retval_cname,
self.return_type)
if self.value:
self.value.generate_evaluation_code(code)
if self.return_type.is_memoryviewslice:
import MemoryView
MemoryView.put_acquire_memoryviewslice(
lhs_cname=Naming.retval_cname,
lhs_type=self.return_type,
lhs_pos=self.value.pos,
rhs=self.value,
code=code,
incref_rhs=self.value.is_name,
have_gil=self.in_nogil_context)
else:
self.value.make_owned_reference(code)
code.putln(
"%s = %s;" % (
Naming.retval_cname,
self.value.result_as(self.return_type)))
self.value.generate_post_assignment_code(code)
self.value.free_temps(code)
else:
if self.return_type.is_pyobject:
code.put_init_to_py_none(Naming.retval_cname, self.return_type)
elif self.return_type.is_returncode:
self.put_return(code, self.return_type.default_value)
for cname, type in code.funcstate.temps_holding_reference():
code.put_decref_clear(cname, type)
code.put_goto(code.return_label)
def put_return(self, code, value):
if self.in_parallel:
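            # inside a parallel section, only one thread at a time may write the return value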
code.putln_openmp("#pragma omp critical(__pyx_returning)")
code.putln("%s = %s;" % (Naming.retval_cname, value))
def generate_function_definitions(self, env, code):
if self.value is not None:
self.value.generate_function_definitions(env, code)
def annotate(self, code):
if self.value:
self.value.annotate(code)
class RaiseStatNode(StatNode):
# raise statement
#
# exc_type ExprNode or None
# exc_value ExprNode or None
# exc_tb ExprNode or None
# cause ExprNode or None
child_attrs = ["exc_type", "exc_value", "exc_tb", "cause"]
is_terminator = True
def analyse_expressions(self, env):
if self.exc_type:
self.exc_type.analyse_types(env)
self.exc_type = self.exc_type.coerce_to_pyobject(env)
if self.exc_value:
self.exc_value.analyse_types(env)
self.exc_value = self.exc_value.coerce_to_pyobject(env)
if self.exc_tb:
self.exc_tb.analyse_types(env)
self.exc_tb = self.exc_tb.coerce_to_pyobject(env)
if self.cause:
self.cause.analyse_types(env)
self.cause = self.cause.coerce_to_pyobject(env)
# special cases for builtin exceptions
self.builtin_exc_name = None
if self.exc_type and not self.exc_value and not self.exc_tb:
exc = self.exc_type
import ExprNodes
if (isinstance(exc, ExprNodes.SimpleCallNode) and
not (exc.args or (exc.arg_tuple is not None and
exc.arg_tuple.args))):
exc = exc.function # extract the exception type
if exc.is_name and exc.entry.is_builtin:
self.builtin_exc_name = exc.name
if self.builtin_exc_name == 'MemoryError':
self.exc_type = None # has a separate implementation
nogil_check = Node.gil_error
gil_message = "Raising exception"
def generate_execution_code(self, code):
if self.builtin_exc_name == 'MemoryError':
code.putln('PyErr_NoMemory(); %s' % code.error_goto(self.pos))
return
if self.exc_type:
self.exc_type.generate_evaluation_code(code)
type_code = self.exc_type.py_result()
else:
type_code = "0"
if self.exc_value:
self.exc_value.generate_evaluation_code(code)
value_code = self.exc_value.py_result()
else:
value_code = "0"
if self.exc_tb:
self.exc_tb.generate_evaluation_code(code)
tb_code = self.exc_tb.py_result()
else:
tb_code = "0"
if self.cause:
self.cause.generate_evaluation_code(code)
cause_code = self.cause.py_result()
else:
cause_code = "0"
code.globalstate.use_utility_code(raise_utility_code)
code.putln(
"__Pyx_Raise(%s, %s, %s, %s);" % (
type_code,
value_code,
tb_code,
cause_code))
for obj in (self.exc_type, self.exc_value, self.exc_tb, self.cause):
if obj:
obj.generate_disposal_code(code)
obj.free_temps(code)
code.putln(
code.error_goto(self.pos))
def generate_function_definitions(self, env, code):
if self.exc_type is not None:
self.exc_type.generate_function_definitions(env, code)
if self.exc_value is not None:
self.exc_value.generate_function_definitions(env, code)
if self.exc_tb is not None:
self.exc_tb.generate_function_definitions(env, code)
if self.cause is not None:
self.cause.generate_function_definitions(env, code)
def annotate(self, code):
if self.exc_type:
self.exc_type.annotate(code)
if self.exc_value:
self.exc_value.annotate(code)
if self.exc_tb:
self.exc_tb.annotate(code)
if self.cause:
self.cause.annotate(code)
class ReraiseStatNode(StatNode):
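    # bare 'raise' statement re-raising the exception currently being handled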
child_attrs = []
is_terminator = True
def analyse_expressions(self, env):
env.use_utility_code(restore_exception_utility_code)
nogil_check = Node.gil_error
gil_message = "Raising exception"
def generate_execution_code(self, code):
vars = code.funcstate.exc_vars
if vars:
for varname in vars:
code.put_giveref(varname)
code.putln("__Pyx_ErrRestore(%s, %s, %s);" % tuple(vars))
for varname in vars:
code.put("%s = 0; " % varname)
code.putln()
code.putln(code.error_goto(self.pos))
else:
error(self.pos, "Reraise not inside except clause")
class AssertStatNode(StatNode):
# assert statement
#
# cond ExprNode
# value ExprNode or None
child_attrs = ["cond", "value"]
def analyse_expressions(self, env):
self.cond = self.cond.analyse_boolean_expression(env)
if self.value:
self.value.analyse_types(env)
self.value = self.value.coerce_to_pyobject(env)
nogil_check = Node.gil_error
gil_message = "Raising exception"
def generate_execution_code(self, code):
code.putln("#ifndef CYTHON_WITHOUT_ASSERTIONS")
self.cond.generate_evaluation_code(code)
code.putln(
"if (unlikely(!%s)) {" %
self.cond.result())
if self.value:
self.value.generate_evaluation_code(code)
code.putln(
"PyErr_SetObject(PyExc_AssertionError, %s);" %
self.value.py_result())
self.value.generate_disposal_code(code)
self.value.free_temps(code)
else:
code.putln(
"PyErr_SetNone(PyExc_AssertionError);")
code.putln(
code.error_goto(self.pos))
code.putln(
"}")
self.cond.generate_disposal_code(code)
self.cond.free_temps(code)
code.putln("#endif")
def generate_function_definitions(self, env, code):
self.cond.generate_function_definitions(env, code)
if self.value is not None:
self.value.generate_function_definitions(env, code)
def annotate(self, code):
self.cond.annotate(code)
if self.value:
self.value.annotate(code)
class IfStatNode(StatNode):
# if statement
#
# if_clauses [IfClauseNode]
# else_clause StatNode or None
child_attrs = ["if_clauses", "else_clause"]
def analyse_declarations(self, env):
for if_clause in self.if_clauses:
if_clause.analyse_declarations(env)
if self.else_clause:
self.else_clause.analyse_declarations(env)
def analyse_expressions(self, env):
for if_clause in self.if_clauses:
if_clause.analyse_expressions(env)
if self.else_clause:
self.else_clause.analyse_expressions(env)
def generate_execution_code(self, code):
code.mark_pos(self.pos)
end_label = code.new_label()
for if_clause in self.if_clauses:
if_clause.generate_execution_code(code, end_label)
if self.else_clause:
code.putln("/*else*/ {")
self.else_clause.generate_execution_code(code)
code.putln("}")
code.put_label(end_label)
def generate_function_definitions(self, env, code):
for clause in self.if_clauses:
clause.generate_function_definitions(env, code)
if self.else_clause is not None:
self.else_clause.generate_function_definitions(env, code)
def annotate(self, code):
for if_clause in self.if_clauses:
if_clause.annotate(code)
if self.else_clause:
self.else_clause.annotate(code)
class IfClauseNode(Node):
# if or elif clause in an if statement
#
# condition ExprNode
# body StatNode
child_attrs = ["condition", "body"]
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
def analyse_expressions(self, env):
self.condition = \
self.condition.analyse_temp_boolean_expression(env)
self.body.analyse_expressions(env)
def get_constant_condition_result(self):
if self.condition.has_constant_result():
return bool(self.condition.constant_result)
else:
return None
def generate_execution_code(self, code, end_label):
self.condition.generate_evaluation_code(code)
code.putln(
"if (%s) {" %
self.condition.result())
self.condition.generate_disposal_code(code)
self.condition.free_temps(code)
self.body.generate_execution_code(code)
code.put_goto(end_label)
code.putln("}")
def generate_function_definitions(self, env, code):
self.condition.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code)
def annotate(self, code):
self.condition.annotate(code)
self.body.annotate(code)
class SwitchCaseNode(StatNode):
# Generated in the optimization of an if-elif-else node
#
# conditions [ExprNode]
# body StatNode
child_attrs = ['conditions', 'body']
def generate_execution_code(self, code):
for cond in self.conditions:
code.mark_pos(cond.pos)
cond.generate_evaluation_code(code)
code.putln("case %s:" % cond.result())
self.body.generate_execution_code(code)
code.putln("break;")
def generate_function_definitions(self, env, code):
for cond in self.conditions:
cond.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code)
def annotate(self, code):
for cond in self.conditions:
cond.annotate(code)
self.body.annotate(code)
class SwitchStatNode(StatNode):
# Generated in the optimization of an if-elif-else node
#
# test ExprNode
# cases [SwitchCaseNode]
# else_clause StatNode or None
child_attrs = ['test', 'cases', 'else_clause']
def generate_execution_code(self, code):
self.test.generate_evaluation_code(code)
code.putln("switch (%s) {" % self.test.result())
for case in self.cases:
case.generate_execution_code(code)
if self.else_clause is not None:
code.putln("default:")
self.else_clause.generate_execution_code(code)
code.putln("break;")
code.putln("}")
def generate_function_definitions(self, env, code):
self.test.generate_function_definitions(env, code)
for case in self.cases:
case.generate_function_definitions(env, code)
if self.else_clause is not None:
self.else_clause.generate_function_definitions(env, code)
def annotate(self, code):
self.test.annotate(code)
for case in self.cases:
case.annotate(code)
if self.else_clause is not None:
self.else_clause.annotate(code)
class LoopNode(object):
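    # Mix-in base class shared by the while- and for-loop statement nodes.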
pass
class WhileStatNode(LoopNode, StatNode):
# while statement
#
# condition ExprNode
# body StatNode
# else_clause StatNode
child_attrs = ["condition", "body", "else_clause"]
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
if self.else_clause:
self.else_clause.analyse_declarations(env)
def analyse_expressions(self, env):
if self.condition:
self.condition = self.condition.analyse_temp_boolean_expression(env)
self.body.analyse_expressions(env)
if self.else_clause:
self.else_clause.analyse_expressions(env)
def generate_execution_code(self, code):
old_loop_labels = code.new_loop_labels()
code.putln(
"while (1) {")
if self.condition:
self.condition.generate_evaluation_code(code)
self.condition.generate_disposal_code(code)
code.putln(
"if (!%s) break;" %
self.condition.result())
self.condition.free_temps(code)
self.body.generate_execution_code(code)
code.put_label(code.continue_label)
code.putln("}")
break_label = code.break_label
code.set_loop_labels(old_loop_labels)
if self.else_clause:
code.putln("/*else*/ {")
self.else_clause.generate_execution_code(code)
code.putln("}")
code.put_label(break_label)
def generate_function_definitions(self, env, code):
if self.condition:
self.condition.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code)
if self.else_clause is not None:
self.else_clause.generate_function_definitions(env, code)
def annotate(self, code):
if self.condition:
self.condition.annotate(code)
self.body.annotate(code)
if self.else_clause:
self.else_clause.annotate(code)
class DictIterationNextNode(Node):
# Helper node for calling PyDict_Next() inside of a WhileStatNode
# and checking the dictionary size for changes. Created in
# Optimize.py.
child_attrs = ['dict_obj', 'expected_size', 'pos_index_addr', 'key_addr', 'value_addr']
def __init__(self, dict_obj, expected_size, pos_index_addr, key_addr, value_addr):
Node.__init__(
self, dict_obj.pos,
dict_obj = dict_obj,
expected_size = expected_size,
pos_index_addr = pos_index_addr,
key_addr = key_addr,
value_addr = value_addr,
type = PyrexTypes.c_bint_type)
def analyse_expressions(self, env):
self.dict_obj.analyse_types(env)
self.expected_size.analyse_types(env)
self.pos_index_addr.analyse_types(env)
self.key_addr.analyse_types(env)
self.value_addr.analyse_types(env)
def generate_function_definitions(self, env, code):
self.dict_obj.generate_function_definitions(env, code)
def generate_execution_code(self, code):
self.dict_obj.generate_evaluation_code(code)
code.putln("if (unlikely(%s != PyDict_Size(%s))) {" % (
self.expected_size.result(),
self.dict_obj.py_result(),
))
code.putln('PyErr_SetString(PyExc_RuntimeError, "dictionary changed size during iteration"); %s' % (
code.error_goto(self.pos)))
code.putln("}")
self.pos_index_addr.generate_evaluation_code(code)
code.putln("if (!PyDict_Next(%s, %s, %s, %s)) break;" % (
self.dict_obj.py_result(),
self.pos_index_addr.result(),
self.key_addr.result(),
self.value_addr.result()))
def ForStatNode(pos, **kw):
if 'iterator' in kw:
return ForInStatNode(pos, **kw)
else:
return ForFromStatNode(pos, **kw)
class ForInStatNode(LoopNode, StatNode):
# for statement
#
# target ExprNode
# iterator IteratorNode
# body StatNode
# else_clause StatNode
# item NextNode used internally
child_attrs = ["target", "iterator", "body", "else_clause"]
item = None
def analyse_declarations(self, env):
self.target.analyse_target_declaration(env)
self.body.analyse_declarations(env)
if self.else_clause:
self.else_clause.analyse_declarations(env)
def analyse_expressions(self, env):
import ExprNodes
self.target.analyse_target_types(env)
self.iterator.analyse_expressions(env)
self.item = ExprNodes.NextNode(self.iterator)
if (self.iterator.type.is_ptr or self.iterator.type.is_array) and \
self.target.type.assignable_from(self.iterator.type):
# C array slice optimization.
pass
else:
self.item = self.item.coerce_to(self.target.type, env)
self.body.analyse_expressions(env)
if self.else_clause:
self.else_clause.analyse_expressions(env)
def generate_execution_code(self, code):
old_loop_labels = code.new_loop_labels()
self.iterator.generate_evaluation_code(code)
code.putln("for (;;) {")
self.item.generate_evaluation_code(code)
self.target.generate_assignment_code(self.item, code)
self.body.generate_execution_code(code)
code.put_label(code.continue_label)
code.putln("}")
break_label = code.break_label
code.set_loop_labels(old_loop_labels)
if self.else_clause:
# in nested loops, the 'else' block can contain a
# 'continue' statement for the outer loop, but we may need
# to generate cleanup code before taking that path, so we
# intercept it here
orig_continue_label = code.continue_label
code.continue_label = code.new_label('outer_continue')
code.putln("/*else*/ {")
self.else_clause.generate_execution_code(code)
code.putln("}")
if code.label_used(code.continue_label):
code.put_goto(break_label)
code.put_label(code.continue_label)
self.iterator.generate_disposal_code(code)
code.put_goto(orig_continue_label)
code.set_loop_labels(old_loop_labels)
if code.label_used(break_label):
code.put_label(break_label)
self.iterator.generate_disposal_code(code)
self.iterator.free_temps(code)
def generate_function_definitions(self, env, code):
self.target.generate_function_definitions(env, code)
self.iterator.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code)
if self.else_clause is not None:
self.else_clause.generate_function_definitions(env, code)
def annotate(self, code):
self.target.annotate(code)
self.iterator.annotate(code)
self.body.annotate(code)
if self.else_clause:
self.else_clause.annotate(code)
self.item.annotate(code)
class ForFromStatNode(LoopNode, StatNode):
# for name from expr rel name rel expr
#
# target NameNode
# bound1 ExprNode
# relation1 string
# relation2 string
# bound2 ExprNode
# step ExprNode or None
# body StatNode
# else_clause StatNode or None
#
# Used internally:
#
# from_range bool
# is_py_target bool
# loopvar_node ExprNode (usually a NameNode or temp node)
# py_loopvar_node PyTempNode or None
child_attrs = ["target", "bound1", "bound2", "step", "body", "else_clause"]
is_py_target = False
loopvar_node = None
py_loopvar_node = None
from_range = False
gil_message = "For-loop using object bounds or target"
def nogil_check(self, env):
for x in (self.target, self.bound1, self.bound2):
if x.type.is_pyobject:
self.gil_error()
def analyse_declarations(self, env):
self.target.analyse_target_declaration(env)
self.body.analyse_declarations(env)
if self.else_clause:
self.else_clause.analyse_declarations(env)
def analyse_expressions(self, env):
import ExprNodes
self.target.analyse_target_types(env)
self.bound1.analyse_types(env)
self.bound2.analyse_types(env)
if self.step is not None:
if isinstance(self.step, ExprNodes.UnaryMinusNode):
                warning(self.step.pos, "Probable infinite loop in for-from-by statement. Consider switching the directions of the relations.", 2)
self.step.analyse_types(env)
target_type = self.target.type
if self.target.type.is_numeric:
loop_type = self.target.type
else:
loop_type = PyrexTypes.c_int_type
if not self.bound1.type.is_pyobject:
loop_type = PyrexTypes.widest_numeric_type(loop_type, self.bound1.type)
if not self.bound2.type.is_pyobject:
loop_type = PyrexTypes.widest_numeric_type(loop_type, self.bound2.type)
if self.step is not None and not self.step.type.is_pyobject:
loop_type = PyrexTypes.widest_numeric_type(loop_type, self.step.type)
self.bound1 = self.bound1.coerce_to(loop_type, env)
self.bound2 = self.bound2.coerce_to(loop_type, env)
if not self.bound2.is_literal:
self.bound2 = self.bound2.coerce_to_temp(env)
if self.step is not None:
self.step = self.step.coerce_to(loop_type, env)
if not self.step.is_literal:
self.step = self.step.coerce_to_temp(env)
target_type = self.target.type
if not (target_type.is_pyobject or target_type.is_numeric):
error(self.target.pos,
"for-from loop variable must be c numeric type or Python object")
if target_type.is_numeric:
self.is_py_target = False
if isinstance(self.target, ExprNodes.IndexNode) and self.target.is_buffer_access:
raise error(self.pos, "Buffer indexing not allowed as for loop target.")
self.loopvar_node = self.target
self.py_loopvar_node = None
else:
self.is_py_target = True
c_loopvar_node = ExprNodes.TempNode(self.pos, loop_type, env)
self.loopvar_node = c_loopvar_node
self.py_loopvar_node = \
ExprNodes.CloneNode(c_loopvar_node).coerce_to_pyobject(env)
self.body.analyse_expressions(env)
if self.else_clause:
self.else_clause.analyse_expressions(env)
def generate_execution_code(self, code):
old_loop_labels = code.new_loop_labels()
from_range = self.from_range
self.bound1.generate_evaluation_code(code)
self.bound2.generate_evaluation_code(code)
offset, incop = self.relation_table[self.relation1]
if self.step is not None:
self.step.generate_evaluation_code(code)
step = self.step.result()
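            # with an explicit step, turn the "++"/"--" increment into "+=step"/"-=step"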
incop = "%s=%s" % (incop[0], step)
import ExprNodes
if isinstance(self.loopvar_node, ExprNodes.TempNode):
self.loopvar_node.allocate(code)
if isinstance(self.py_loopvar_node, ExprNodes.TempNode):
self.py_loopvar_node.allocate(code)
if from_range:
loopvar_name = code.funcstate.allocate_temp(self.target.type, False)
else:
loopvar_name = self.loopvar_node.result()
code.putln(
"for (%s = %s%s; %s %s %s; %s%s) {" % (
loopvar_name,
self.bound1.result(), offset,
loopvar_name, self.relation2, self.bound2.result(),
loopvar_name, incop))
if self.py_loopvar_node:
self.py_loopvar_node.generate_evaluation_code(code)
self.target.generate_assignment_code(self.py_loopvar_node, code)
elif from_range:
code.putln("%s = %s;" % (
self.target.result(), loopvar_name))
self.body.generate_execution_code(code)
code.put_label(code.continue_label)
if self.py_loopvar_node:
# This mess is to make for..from loops with python targets behave
            # exactly like those with C targets with regard to re-assignment
# of the loop variable.
import ExprNodes
if self.target.entry.is_pyglobal:
# We know target is a NameNode, this is the only ugly case.
target_node = ExprNodes.PyTempNode(self.target.pos, None)
target_node.allocate(code)
interned_cname = code.intern_identifier(self.target.entry.name)
code.globalstate.use_utility_code(ExprNodes.get_name_interned_utility_code)
code.putln("%s = __Pyx_GetName(%s, %s); %s" % (
target_node.result(),
Naming.module_cname,
interned_cname,
code.error_goto_if_null(target_node.result(), self.target.pos)))
code.put_gotref(target_node.result())
else:
target_node = self.target
from_py_node = ExprNodes.CoerceFromPyTypeNode(self.loopvar_node.type, target_node, None)
from_py_node.temp_code = loopvar_name
from_py_node.generate_result_code(code)
if self.target.entry.is_pyglobal:
code.put_decref(target_node.result(), target_node.type)
target_node.release(code)
code.putln("}")
if self.py_loopvar_node:
# This is potentially wasteful, but we don't want the semantics to
# depend on whether or not the loop is a python type.
self.py_loopvar_node.generate_evaluation_code(code)
self.target.generate_assignment_code(self.py_loopvar_node, code)
if from_range:
code.funcstate.release_temp(loopvar_name)
break_label = code.break_label
code.set_loop_labels(old_loop_labels)
if self.else_clause:
code.putln("/*else*/ {")
self.else_clause.generate_execution_code(code)
code.putln("}")
code.put_label(break_label)
self.bound1.generate_disposal_code(code)
self.bound1.free_temps(code)
self.bound2.generate_disposal_code(code)
self.bound2.free_temps(code)
if isinstance(self.loopvar_node, ExprNodes.TempNode):
self.loopvar_node.release(code)
if isinstance(self.py_loopvar_node, ExprNodes.TempNode):
self.py_loopvar_node.release(code)
if self.step is not None:
self.step.generate_disposal_code(code)
self.step.free_temps(code)
relation_table = {
# {relop : (initial offset, increment op)}
'<=': ("", "++"),
'<' : ("+1", "++"),
'>=': ("", "--"),
'>' : ("-1", "--")
}
def generate_function_definitions(self, env, code):
self.target.generate_function_definitions(env, code)
self.bound1.generate_function_definitions(env, code)
self.bound2.generate_function_definitions(env, code)
if self.step is not None:
self.step.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code)
if self.else_clause is not None:
self.else_clause.generate_function_definitions(env, code)
def annotate(self, code):
self.target.annotate(code)
self.bound1.annotate(code)
self.bound2.annotate(code)
if self.step:
self.step.annotate(code)
self.body.annotate(code)
if self.else_clause:
self.else_clause.annotate(code)
class WithStatNode(StatNode):
"""
Represents a Python with statement.
Implemented by the WithTransform as follows:
MGR = EXPR
EXIT = MGR.__exit__
VALUE = MGR.__enter__()
EXC = True
try:
try:
TARGET = VALUE # optional
BODY
except:
EXC = False
if not EXIT(*EXCINFO):
raise
finally:
if EXC:
EXIT(None, None, None)
MGR = EXIT = VALUE = None
"""
# manager The with statement manager object
# target ExprNode the target lhs of the __enter__() call
# body StatNode
# enter_call ExprNode the call to the __enter__() method
child_attrs = ["manager", "target", "body", "enter_call"]
enter_call = None
has_target = False
def analyse_declarations(self, env):
self.manager.analyse_declarations(env)
self.enter_call.analyse_declarations(env)
self.body.analyse_declarations(env)
def analyse_expressions(self, env):
self.manager.analyse_types(env)
self.enter_call.analyse_types(env)
self.body.analyse_expressions(env)
def generate_function_definitions(self, env, code):
self.manager.generate_function_definitions(env, code)
self.enter_call.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code)
def generate_execution_code(self, code):
code.putln("/*with:*/ {")
self.manager.generate_evaluation_code(code)
self.exit_var = code.funcstate.allocate_temp(py_object_type, manage_ref=False)
code.putln("%s = PyObject_GetAttr(%s, %s); %s" % (
self.exit_var,
self.manager.py_result(),
code.get_py_string_const(EncodedString('__exit__'), identifier=True),
code.error_goto_if_null(self.exit_var, self.pos),
))
code.put_gotref(self.exit_var)
# need to free exit_var in the face of exceptions during setup
old_error_label = code.new_error_label()
intermediate_error_label = code.error_label
self.enter_call.generate_evaluation_code(code)
if not self.target:
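            # no 'as' target: the __enter__() result is unused, so dispose of it here;
            # otherwise the WithTargetAssignmentStatNode in the body consumes the reference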
self.enter_call.generate_disposal_code(code)
self.enter_call.free_temps(code)
self.manager.generate_disposal_code(code)
self.manager.free_temps(code)
code.error_label = old_error_label
self.body.generate_execution_code(code)
if code.label_used(intermediate_error_label):
step_over_label = code.new_label()
code.put_goto(step_over_label)
code.put_label(intermediate_error_label)
code.put_decref_clear(self.exit_var, py_object_type)
code.put_goto(old_error_label)
code.put_label(step_over_label)
code.funcstate.release_temp(self.exit_var)
code.putln('}')
class WithTargetAssignmentStatNode(AssignmentNode):
# The target assignment of the 'with' statement value (return
# value of the __enter__() call).
#
# This is a special cased assignment that steals the RHS reference
# and frees its temp.
#
# lhs ExprNode the assignment target
# orig_rhs ExprNode the return value of the __enter__() call (not owned by this node!)
# rhs ResultRefNode a ResultRefNode for the orig_rhs (owned by this node)
child_attrs = ["lhs", "rhs"]
def analyse_declarations(self, env):
self.lhs.analyse_target_declaration(env)
def analyse_types(self, env):
self.rhs.analyse_types(env)
self.lhs.analyse_target_types(env)
self.lhs.gil_assignment_check(env)
self.rhs = self.rhs.coerce_to(self.lhs.type, env)
def generate_execution_code(self, code):
self.rhs.generate_evaluation_code(code)
self.lhs.generate_assignment_code(self.rhs, code)
self.orig_rhs.generate_disposal_code(code)
self.orig_rhs.free_temps(code)
def annotate(self, code):
self.lhs.annotate(code)
self.rhs.annotate(code)
class TryExceptStatNode(StatNode):
# try .. except statement
#
# body StatNode
# except_clauses [ExceptClauseNode]
# else_clause StatNode or None
child_attrs = ["body", "except_clauses", "else_clause"]
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
for except_clause in self.except_clauses:
except_clause.analyse_declarations(env)
if self.else_clause:
self.else_clause.analyse_declarations(env)
env.use_utility_code(reset_exception_utility_code)
def analyse_expressions(self, env):
self.body.analyse_expressions(env)
default_clause_seen = 0
for except_clause in self.except_clauses:
except_clause.analyse_expressions(env)
if default_clause_seen:
error(except_clause.pos, "default 'except:' must be last")
if not except_clause.pattern:
default_clause_seen = 1
self.has_default_clause = default_clause_seen
if self.else_clause:
self.else_clause.analyse_expressions(env)
nogil_check = Node.gil_error
gil_message = "Try-except statement"
def generate_execution_code(self, code):
old_return_label = code.return_label
old_break_label = code.break_label
old_continue_label = code.continue_label
old_error_label = code.new_error_label()
our_error_label = code.error_label
except_end_label = code.new_label('exception_handled')
except_error_label = code.new_label('except_error')
except_return_label = code.new_label('except_return')
try_return_label = code.new_label('try_return')
try_break_label = code.new_label('try_break')
try_continue_label = code.new_label('try_continue')
try_end_label = code.new_label('try_end')
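        # save the exception state active on entry so it can be restored
        # (via __Pyx_ExceptionReset) on every path that leaves the try block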
exc_save_vars = [code.funcstate.allocate_temp(py_object_type, False)
for i in xrange(3)]
code.putln("{")
code.putln("__Pyx_ExceptionSave(%s);" %
', '.join(['&%s' % var for var in exc_save_vars]))
for var in exc_save_vars:
code.put_xgotref(var)
code.putln(
"/*try:*/ {")
code.return_label = try_return_label
code.break_label = try_break_label
code.continue_label = try_continue_label
self.body.generate_execution_code(code)
code.putln(
"}")
temps_to_clean_up = code.funcstate.all_free_managed_temps()
code.error_label = except_error_label
code.return_label = except_return_label
if self.else_clause:
code.putln(
"/*else:*/ {")
self.else_clause.generate_execution_code(code)
code.putln(
"}")
for var in exc_save_vars:
code.put_xdecref_clear(var, py_object_type)
code.put_goto(try_end_label)
if code.label_used(try_return_label):
code.put_label(try_return_label)
for var in exc_save_vars:
code.put_xgiveref(var)
code.putln("__Pyx_ExceptionReset(%s);" %
', '.join(exc_save_vars))
code.put_goto(old_return_label)
code.put_label(our_error_label)
for temp_name, type in temps_to_clean_up:
code.put_xdecref_clear(temp_name, type)
for except_clause in self.except_clauses:
except_clause.generate_handling_code(code, except_end_label)
error_label_used = code.label_used(except_error_label)
if error_label_used or not self.has_default_clause:
if error_label_used:
code.put_label(except_error_label)
for var in exc_save_vars:
code.put_xgiveref(var)
code.putln("__Pyx_ExceptionReset(%s);" %
', '.join(exc_save_vars))
code.put_goto(old_error_label)
for exit_label, old_label in zip(
[try_break_label, try_continue_label, except_return_label],
[old_break_label, old_continue_label, old_return_label]):
if code.label_used(exit_label):
code.put_label(exit_label)
for var in exc_save_vars:
code.put_xgiveref(var)
code.putln("__Pyx_ExceptionReset(%s);" %
', '.join(exc_save_vars))
code.put_goto(old_label)
if code.label_used(except_end_label):
code.put_label(except_end_label)
for var in exc_save_vars:
code.put_xgiveref(var)
code.putln("__Pyx_ExceptionReset(%s);" %
', '.join(exc_save_vars))
code.put_label(try_end_label)
code.putln("}")
for cname in exc_save_vars:
code.funcstate.release_temp(cname)
code.return_label = old_return_label
code.break_label = old_break_label
code.continue_label = old_continue_label
code.error_label = old_error_label
def generate_function_definitions(self, env, code):
self.body.generate_function_definitions(env, code)
for except_clause in self.except_clauses:
except_clause.generate_function_definitions(env, code)
if self.else_clause is not None:
self.else_clause.generate_function_definitions(env, code)
def annotate(self, code):
self.body.annotate(code)
for except_node in self.except_clauses:
except_node.annotate(code)
if self.else_clause:
self.else_clause.annotate(code)
class ExceptClauseNode(Node):
# Part of try ... except statement.
#
# pattern [ExprNode]
# target ExprNode or None
# body StatNode
# excinfo_target ResultRefNode or None optional target for exception info
# match_flag string result of exception match
# exc_value ExcValueNode used internally
# function_name string qualified name of enclosing function
# exc_vars (string * 3) local exception variables
# excinfo_target is never set by the parser, but can be set by a transform
# in order to extract more extensive information about the exception as a
# sys.exc_info()-style tuple into a target variable
child_attrs = ["pattern", "target", "body", "exc_value", "excinfo_target"]
exc_value = None
excinfo_target = None
def analyse_declarations(self, env):
if self.target:
self.target.analyse_target_declaration(env)
self.body.analyse_declarations(env)
def analyse_expressions(self, env):
import ExprNodes
genv = env.global_scope()
self.function_name = env.qualified_name
if self.pattern:
# normalise/unpack self.pattern into a list
for i, pattern in enumerate(self.pattern):
pattern.analyse_expressions(env)
self.pattern[i] = pattern.coerce_to_pyobject(env)
if self.target:
self.exc_value = ExprNodes.ExcValueNode(self.pos, env)
self.target.analyse_target_expression(env, self.exc_value)
if self.excinfo_target is not None:
import ExprNodes
self.excinfo_tuple = ExprNodes.TupleNode(pos=self.pos, args=[
ExprNodes.ExcValueNode(pos=self.pos, env=env) for x in range(3)])
self.excinfo_tuple.analyse_expressions(env)
self.body.analyse_expressions(env)
def generate_handling_code(self, code, end_label):
code.mark_pos(self.pos)
if self.pattern:
exc_tests = []
for pattern in self.pattern:
pattern.generate_evaluation_code(code)
exc_tests.append("PyErr_ExceptionMatches(%s)" % pattern.py_result())
match_flag = code.funcstate.allocate_temp(PyrexTypes.c_int_type, False)
code.putln(
"%s = %s;" % (match_flag, ' || '.join(exc_tests)))
for pattern in self.pattern:
pattern.generate_disposal_code(code)
pattern.free_temps(code)
code.putln(
"if (%s) {" %
match_flag)
code.funcstate.release_temp(match_flag)
else:
code.putln("/*except:*/ {")
if not getattr(self.body, 'stats', True) and \
self.excinfo_target is None and self.target is None:
# most simple case: no exception variable, empty body (pass)
# => reset the exception state, done
code.putln("PyErr_Restore(0,0,0);")
code.put_goto(end_label)
code.putln("}")
return
exc_vars = [code.funcstate.allocate_temp(py_object_type,
manage_ref=True)
for i in xrange(3)]
code.put_add_traceback(self.function_name)
# We always have to fetch the exception value even if
# there is no target, because this also normalises the
# exception and stores it in the thread state.
code.globalstate.use_utility_code(get_exception_utility_code)
exc_args = "&%s, &%s, &%s" % tuple(exc_vars)
code.putln("if (__Pyx_GetException(%s) < 0) %s" % (exc_args,
code.error_goto(self.pos)))
for x in exc_vars:
code.put_gotref(x)
if self.target:
self.exc_value.set_var(exc_vars[1])
self.exc_value.generate_evaluation_code(code)
self.target.generate_assignment_code(self.exc_value, code)
if self.excinfo_target is not None:
for tempvar, node in zip(exc_vars, self.excinfo_tuple.args):
node.set_var(tempvar)
self.excinfo_tuple.generate_evaluation_code(code)
self.excinfo_target.result_code = self.excinfo_tuple.result()
old_break_label, old_continue_label = code.break_label, code.continue_label
code.break_label = code.new_label('except_break')
code.continue_label = code.new_label('except_continue')
old_exc_vars = code.funcstate.exc_vars
code.funcstate.exc_vars = exc_vars
self.body.generate_execution_code(code)
code.funcstate.exc_vars = old_exc_vars
if self.excinfo_target is not None:
self.excinfo_tuple.generate_disposal_code(code)
for var in exc_vars:
code.put_decref_clear(var, py_object_type)
code.put_goto(end_label)
if code.label_used(code.break_label):
code.put_label(code.break_label)
if self.excinfo_target is not None:
self.excinfo_tuple.generate_disposal_code(code)
for var in exc_vars:
code.put_decref_clear(var, py_object_type)
code.put_goto(old_break_label)
code.break_label = old_break_label
if code.label_used(code.continue_label):
code.put_label(code.continue_label)
if self.excinfo_target is not None:
self.excinfo_tuple.generate_disposal_code(code)
for var in exc_vars:
code.put_decref_clear(var, py_object_type)
code.put_goto(old_continue_label)
code.continue_label = old_continue_label
if self.excinfo_target is not None:
self.excinfo_tuple.free_temps(code)
for temp in exc_vars:
code.funcstate.release_temp(temp)
code.putln(
"}")
def generate_function_definitions(self, env, code):
if self.target is not None:
self.target.generate_function_definitions(env, code)
self.body.generate_function_definitions(env, code)
def annotate(self, code):
if self.pattern:
for pattern in self.pattern:
pattern.annotate(code)
if self.target:
self.target.annotate(code)
self.body.annotate(code)
class TryFinallyStatNode(StatNode):
# try ... finally statement
#
# body StatNode
# finally_clause StatNode
#
    # The plan is that we funnel all continue, break,
# return and error gotos into the beginning of the
# finally block, setting a variable to remember which
# one we're doing. At the end of the finally block, we
# switch on the variable to figure out where to go.
# In addition, if we're doing an error, we save the
# exception on entry to the finally block and restore
# it on exit.
child_attrs = ["body", "finally_clause"]
preserve_exception = 1
# handle exception case, in addition to return/break/continue
handle_error_case = True
disallow_continue_in_try_finally = 0
# There doesn't seem to be any point in disallowing
# continue in the try block, since we have no problem
# handling it.
is_try_finally_in_nogil = False
def create_analysed(pos, env, body, finally_clause):
node = TryFinallyStatNode(pos, body=body, finally_clause=finally_clause)
return node
create_analysed = staticmethod(create_analysed)
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
self.finally_clause.analyse_declarations(env)
def analyse_expressions(self, env):
self.body.analyse_expressions(env)
self.finally_clause.analyse_expressions(env)
nogil_check = Node.gil_error
gil_message = "Try-finally statement"
def generate_execution_code(self, code):
old_error_label = code.error_label
old_labels = code.all_new_labels()
new_labels = code.get_all_labels()
new_error_label = code.error_label
if not self.handle_error_case:
code.error_label = old_error_label
catch_label = code.new_label()
code.putln("/*try:*/ {")
if self.disallow_continue_in_try_finally:
was_in_try_finally = code.funcstate.in_try_finally
code.funcstate.in_try_finally = 1
self.body.generate_execution_code(code)
if self.disallow_continue_in_try_finally:
code.funcstate.in_try_finally = was_in_try_finally
code.putln("}")
temps_to_clean_up = code.funcstate.all_free_managed_temps()
code.mark_pos(self.finally_clause.pos)
code.putln("/*finally:*/ {")
cases_used = []
error_label_used = 0
for i, new_label in enumerate(new_labels):
if new_label in code.labels_used:
cases_used.append(i)
if new_label == new_error_label:
error_label_used = 1
error_label_case = i
if cases_used:
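            # __pyx_why records which intercepted goto (return/break/continue/error)
            # brought control into the finally block, so it can be resumed afterwards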
code.putln("int __pyx_why;")
if error_label_used and self.preserve_exception:
if self.is_try_finally_in_nogil:
code.declare_gilstate()
code.putln("PyObject *%s, *%s, *%s;" % Naming.exc_vars)
code.putln("int %s;" % Naming.exc_lineno_name)
exc_var_init_zero = ''.join(
["%s = 0; " % var for var in Naming.exc_vars])
exc_var_init_zero += '%s = 0;' % Naming.exc_lineno_name
code.putln(exc_var_init_zero)
else:
exc_var_init_zero = None
code.use_label(catch_label)
code.putln("__pyx_why = 0; goto %s;" % catch_label)
for i in cases_used:
new_label = new_labels[i]
#if new_label and new_label != "<try>":
if new_label == new_error_label and self.preserve_exception:
self.put_error_catcher(code,
new_error_label, i+1, catch_label, temps_to_clean_up)
else:
code.put('%s: ' % new_label)
if exc_var_init_zero:
code.putln(exc_var_init_zero)
code.putln("__pyx_why = %s; goto %s;" % (i+1, catch_label))
code.put_label(catch_label)
code.set_all_labels(old_labels)
if error_label_used:
code.new_error_label()
finally_error_label = code.error_label
self.finally_clause.generate_execution_code(code)
if error_label_used:
if finally_error_label in code.labels_used and self.preserve_exception:
over_label = code.new_label()
code.put_goto(over_label)
code.put_label(finally_error_label)
code.putln("if (__pyx_why == %d) {" % (error_label_case + 1))
if self.is_try_finally_in_nogil:
code.put_ensure_gil(declare_gilstate=False)
for var in Naming.exc_vars:
code.putln("Py_XDECREF(%s);" % var)
if self.is_try_finally_in_nogil:
code.put_release_ensured_gil()
code.putln("}")
code.put_goto(old_error_label)
code.put_label(over_label)
code.error_label = old_error_label
if cases_used:
code.putln(
"switch (__pyx_why) {")
for i in cases_used:
old_label = old_labels[i]
if old_label == old_error_label and self.preserve_exception:
self.put_error_uncatcher(code, i+1, old_error_label)
else:
code.use_label(old_label)
code.putln("case %s: goto %s;" % (i+1, old_label))
# End the switch
code.putln(
"}")
# End finally
code.putln(
"}")
def generate_function_definitions(self, env, code):
self.body.generate_function_definitions(env, code)
self.finally_clause.generate_function_definitions(env, code)
def put_error_catcher(self, code, error_label, i, catch_label,
temps_to_clean_up):
code.globalstate.use_utility_code(restore_exception_utility_code)
code.putln("%s: {" % error_label)
code.putln("__pyx_why = %s;" % i)
if self.is_try_finally_in_nogil:
code.put_ensure_gil(declare_gilstate=False)
for temp_name, type in temps_to_clean_up:
code.put_xdecref_clear(temp_name, type)
code.putln("__Pyx_ErrFetch(&%s, &%s, &%s);" % Naming.exc_vars)
code.putln("%s = %s;" % (Naming.exc_lineno_name, Naming.lineno_cname))
if self.is_try_finally_in_nogil:
code.put_release_ensured_gil()
code.put_goto(catch_label)
code.putln("}")
def put_error_uncatcher(self, code, i, error_label):
code.globalstate.use_utility_code(restore_exception_utility_code)
code.putln(
"case %s: {" % i)
if self.is_try_finally_in_nogil:
code.put_ensure_gil(declare_gilstate=False)
code.putln("__Pyx_ErrRestore(%s, %s, %s);" % Naming.exc_vars)
code.putln("%s = %s;" % (Naming.lineno_cname, Naming.exc_lineno_name))
if self.is_try_finally_in_nogil:
code.put_release_ensured_gil()
for var in Naming.exc_vars:
code.putln(
"%s = 0;" % var)
code.put_goto(error_label)
code.putln(
"}")
def annotate(self, code):
self.body.annotate(code)
self.finally_clause.annotate(code)
class NogilTryFinallyStatNode(TryFinallyStatNode):
"""
A try/finally statement that may be used in nogil code sections.
"""
preserve_exception = False
nogil_check = None
class GILStatNode(NogilTryFinallyStatNode):
# 'with gil' or 'with nogil' statement
#
# state string 'gil' or 'nogil'
def __init__(self, pos, state, body):
self.state = state
TryFinallyStatNode.__init__(self, pos,
body = body,
finally_clause = GILExitNode(pos, state = state))
def analyse_declarations(self, env):
env._in_with_gil_block = (self.state == 'gil')
if self.state == 'gil':
env.has_with_gil_block = True
return super(GILStatNode, self).analyse_declarations(env)
def analyse_expressions(self, env):
env.use_utility_code(force_init_threads_utility_code)
was_nogil = env.nogil
env.nogil = self.state == 'nogil'
TryFinallyStatNode.analyse_expressions(self, env)
env.nogil = was_nogil
def generate_execution_code(self, code):
code.mark_pos(self.pos)
code.begin_block()
if self.state == 'gil':
code.put_ensure_gil()
else:
code.put_release_gil()
TryFinallyStatNode.generate_execution_code(self, code)
code.end_block()
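# Illustrative sketch (editor's note, not part of the original source): a
# GILStatNode is what a Cython 'with gil' / 'with nogil' block compiles into.
# The helper names below are hypothetical:
#
#     with nogil:          # GILStatNode(state='nogil')
#         c_work()         # body runs without the GIL
#         with gil:        # nested GILStatNode(state='gil')
#             py_work()    # GIL re-acquired just for this inner block
#
# The finally clause of the underlying try/finally is a GILExitNode, which
# re-acquires (for 'nogil') or releases (for 'gil') the GIL on every exit path.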
class GILExitNode(StatNode):
"""
Used as the 'finally' block in a GILStatNode
state string 'gil' or 'nogil'
"""
child_attrs = []
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
if self.state == 'gil':
code.put_release_ensured_gil()
else:
code.put_acquire_gil()
class EnsureGILNode(GILExitNode):
"""
Ensure the GIL in nogil functions for cleanup before returning.
"""
def generate_execution_code(self, code):
code.put_ensure_gil(declare_gilstate=False)
class CImportStatNode(StatNode):
# cimport statement
#
# module_name string Qualified name of module being imported
# as_name string or None Name specified in "as" clause, if any
child_attrs = []
def analyse_declarations(self, env):
if not env.is_module_scope:
error(self.pos, "cimport only allowed at module level")
return
module_scope = env.find_module(self.module_name, self.pos)
if "." in self.module_name:
names = [EncodedString(name) for name in self.module_name.split(".")]
top_name = names[0]
top_module_scope = env.context.find_submodule(top_name)
module_scope = top_module_scope
for name in names[1:]:
submodule_scope = module_scope.find_submodule(name)
module_scope.declare_module(name, submodule_scope, self.pos)
module_scope = submodule_scope
if self.as_name:
env.declare_module(self.as_name, module_scope, self.pos)
else:
env.add_imported_module(module_scope)
env.declare_module(top_name, top_module_scope, self.pos)
else:
name = self.as_name or self.module_name
env.declare_module(name, module_scope, self.pos)
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
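# Illustrative sketch (editor's note, not part of the original source): inputs
# handled by CImportStatNode; module names are hypothetical.  For a dotted name
# every package level is declared as a submodule so that compile-time attribute
# lookups resolve:
#
#     cimport cmod                   # declares 'cmod' in the current scope
#     cimport pkg.sub.mod            # declares 'pkg', 'pkg.sub' and 'pkg.sub.mod'
#     cimport pkg.sub.mod as m       # additionally binds 'm' to the module scope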
class FromCImportStatNode(StatNode):
# from ... cimport statement
#
# module_name string Qualified name of module
# imported_names [(pos, name, as_name, kind)] Names to be imported
child_attrs = []
def analyse_declarations(self, env):
if not env.is_module_scope:
error(self.pos, "cimport only allowed at module level")
return
module_scope = env.find_module(self.module_name, self.pos)
env.add_imported_module(module_scope)
for pos, name, as_name, kind in self.imported_names:
if name == "*":
for local_name, entry in module_scope.entries.items():
env.add_imported_entry(local_name, entry, pos)
else:
entry = module_scope.lookup(name)
if entry:
if kind and not self.declaration_matches(entry, kind):
entry.redeclared(pos)
entry.used = 1
else:
if kind == 'struct' or kind == 'union':
entry = module_scope.declare_struct_or_union(name,
kind = kind, scope = None, typedef_flag = 0, pos = pos)
elif kind == 'class':
entry = module_scope.declare_c_class(name, pos = pos,
module_name = self.module_name)
else:
submodule_scope = env.context.find_module(name, relative_to = module_scope, pos = self.pos)
if submodule_scope.parent_module is module_scope:
env.declare_module(as_name or name, submodule_scope, self.pos)
else:
error(pos, "Name '%s' not declared in module '%s'"
% (name, self.module_name))
if entry:
local_name = as_name or name
env.add_imported_entry(local_name, entry, pos)
def declaration_matches(self, entry, kind):
if not entry.is_type:
return 0
type = entry.type
if kind == 'class':
if not type.is_extension_type:
return 0
else:
if not type.is_struct_or_union:
return 0
if kind != type.kind:
return 0
return 1
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
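# Illustrative sketch (editor's note, not part of the original source): typical
# inputs handled by FromCImportStatNode (module and symbol names are
# hypothetical):
#
#     from cmod cimport cfunc, MyStruct    # plain names, kind is None
#     from cmod cimport cfunc as alias     # bound locally under 'alias'
#     from pkg cimport submod              # falls back to the submodule lookup above
#     from cmod cimport *                  # copies every entry of the module scope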
class FromImportStatNode(StatNode):
# from ... import statement
#
# module ImportNode
# items [(string, NameNode)]
# interned_items [(string, NameNode, ExprNode)]
# item PyTempNode used internally
# import_star boolean used internally
child_attrs = ["module"]
import_star = 0
def analyse_declarations(self, env):
for name, target in self.items:
if name == "*":
if not env.is_module_scope:
error(self.pos, "import * only allowed at module level")
return
env.has_import_star = 1
self.import_star = 1
else:
target.analyse_target_declaration(env)
def analyse_expressions(self, env):
import ExprNodes
self.module.analyse_expressions(env)
self.item = ExprNodes.RawCNameExprNode(self.pos, py_object_type)
self.interned_items = []
for name, target in self.items:
if name == '*':
for _, entry in env.entries.items():
if not entry.is_type and entry.type.is_extension_type:
env.use_utility_code(ExprNodes.type_test_utility_code)
break
else:
entry = env.lookup(target.name)
# check whether or not entry is already cimported
if (entry.is_type and entry.type.name == name
and hasattr(entry.type, 'module_name')):
if entry.type.module_name == self.module.module_name.value:
# cimported with absolute name
continue
try:
# cimported with relative name
module = env.find_module(self.module.module_name.value,
pos=None)
if entry.type.module_name == module.qualified_name:
continue
except AttributeError:
pass
target.analyse_target_expression(env, None)
if target.type is py_object_type:
coerced_item = None
else:
coerced_item = self.item.coerce_to(target.type, env)
self.interned_items.append((name, target, coerced_item))
if self.interned_items:
env.use_utility_code(raise_import_error_utility_code)
def generate_execution_code(self, code):
self.module.generate_evaluation_code(code)
if self.import_star:
code.putln(
'if (%s(%s) < 0) %s;' % (
Naming.import_star,
self.module.py_result(),
code.error_goto(self.pos)))
item_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
self.item.set_cname(item_temp)
for name, target, coerced_item in self.interned_items:
cname = code.intern_identifier(name)
code.putln(
'%s = PyObject_GetAttr(%s, %s);' % (
item_temp,
self.module.py_result(),
cname))
code.putln('if (%s == NULL) {' % item_temp)
code.putln(
'if (PyErr_ExceptionMatches(PyExc_AttributeError)) '
'__Pyx_RaiseImportError(%s);' % cname)
code.putln(code.error_goto_if_null(item_temp, self.pos))
code.putln('}')
code.put_gotref(item_temp)
if coerced_item is None:
target.generate_assignment_code(self.item, code)
else:
coerced_item.allocate_temp_result(code)
coerced_item.generate_result_code(code)
target.generate_assignment_code(coerced_item, code)
code.put_decref_clear(item_temp, py_object_type)
code.funcstate.release_temp(item_temp)
self.module.generate_disposal_code(code)
self.module.free_temps(code)
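# Illustrative sketch (editor's note, not part of the original source): for a
# Python-level 'from mod import name', generate_execution_code() above emits
# roughly the following C (cnames simplified, not the exact ones Cython uses):
#
#     __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__name);
#     if (__pyx_t_2 == NULL) {
#         if (PyErr_ExceptionMatches(PyExc_AttributeError))
#             __Pyx_RaiseImportError(__pyx_n_s__name);
#         goto __pyx_L1_error;
#     }
#
# followed by the assignment to the target and a decref of the temporary.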
class ParallelNode(Node):
"""
Base class for cython.parallel constructs.
"""
nogil_check = None
class ParallelStatNode(StatNode, ParallelNode):
"""
Base class for 'with cython.parallel.parallel():' and 'for i in prange():'.
assignments { Entry(var) : (var.pos, inplace_operator_or_None) }
assignments to variables in this parallel section
parent parent ParallelStatNode or None
is_parallel indicates whether this node is OpenMP parallel
(true for #pragma omp parallel for and
#pragma omp parallel)
is_parallel is true for:
#pragma omp parallel
#pragma omp parallel for
sections, but NOT for
#pragma omp for
We need this to determine the sharing attributes.
privatization_insertion_point a code insertion point used to make temps
private (esp. the "nsteps" temp)
args tuple the arguments passed to the parallel construct
kwargs DictNode the keyword arguments passed to the parallel
construct (replaced by its compile time value)
"""
child_attrs = ['body', 'num_threads']
body = None
is_prange = False
error_label_used = False
num_threads = None
parallel_exc = (
Naming.parallel_exc_type,
Naming.parallel_exc_value,
Naming.parallel_exc_tb,
)
parallel_pos_info = (
Naming.parallel_filename,
Naming.parallel_lineno,
Naming.parallel_clineno,
)
pos_info = (
Naming.filename_cname,
Naming.lineno_cname,
Naming.clineno_cname,
)
critical_section_counter = 0
def __init__(self, pos, **kwargs):
super(ParallelStatNode, self).__init__(pos, **kwargs)
# All assignments in this scope
self.assignments = kwargs.get('assignments') or {}
# All seen closure cnames and their temporary cnames
self.seen_closure_vars = set()
# Dict of variables that should be declared (first|last|)private or
# reduction { Entry: (op, lastprivate) }.
# If op is not None, it's a reduction.
self.privates = {}
# [NameNode]
self.assigned_nodes = []
def analyse_declarations(self, env):
self.body.analyse_declarations(env)
self.num_threads = None
if self.kwargs:
for idx, dictitem in enumerate(self.kwargs.key_value_pairs[:]):
if dictitem.key.value == 'num_threads':
self.num_threads = dictitem.value
del self.kwargs.key_value_pairs[idx]
break
try:
self.kwargs = self.kwargs.compile_time_value(env)
except Exception, e:
error(self.kwargs.pos, "Only compile-time values may be "
"supplied as keyword arguments")
else:
self.kwargs = {}
for kw, val in self.kwargs.iteritems():
if kw not in self.valid_keyword_arguments:
error(self.pos, "Invalid keyword argument: %s" % kw)
else:
setattr(self, kw, val)
def analyse_expressions(self, env):
if self.num_threads:
self.num_threads.analyse_expressions(env)
self.body.analyse_expressions(env)
self.analyse_sharing_attributes(env)
if self.num_threads is not None:
if self.parent and self.parent.num_threads is not None:
error(self.pos,
"num_threads already declared in outer section")
elif self.parent:
error(self.pos,
"num_threads must be declared in the parent parallel section")
elif (self.num_threads.type.is_int and
self.num_threads.is_literal and
self.num_threads.compile_time_value(env) <= 0):
error(self.pos,
"argument to num_threads must be greater than 0")
self.num_threads = self.num_threads.coerce_to(
PyrexTypes.c_int_type, env).coerce_to_temp(env)
def analyse_sharing_attributes(self, env):
"""
Analyse the privates for this block and set them in self.privates.
This should be called in a post-order fashion during the
analyse_expressions phase
"""
for entry, (pos, op) in self.assignments.iteritems():
if self.is_prange and not self.is_parallel:
# closely nested prange in a with parallel block, disallow
# assigning to privates in the with parallel block (we
# consider it too implicit and magicky for users)
if entry in self.parent.assignments:
error(pos,
"Cannot assign to private of outer parallel block")
continue
if not self.is_prange and op:
                # Again possible, but considered too magicky
error(pos, "Reductions not allowed for parallel blocks")
continue
# By default all variables should have the same values as if
# executed sequentially
lastprivate = True
self.propagate_var_privatization(entry, pos, op, lastprivate)
def propagate_var_privatization(self, entry, pos, op, lastprivate):
"""
Propagate the sharing attributes of a variable. If the privatization is
        determined by a parent scope, don't propagate further.
        If we are a prange, we propagate our sharing attributes outwards to
        other pranges. If we are a prange in a parallel block and the parallel
        block does not determine the variable private, we propagate to the
parent of the parent. Recursion stops at parallel blocks, as they have
no concept of lastprivate or reduction.
So the following cases propagate:
sum is a reduction for all loops:
for i in prange(n):
for j in prange(n):
for k in prange(n):
sum += i * j * k
sum is a reduction for both loops, local_var is private to the
parallel with block:
for i in prange(n):
with parallel:
local_var = ... # private to the parallel
for j in prange(n):
sum += i * j
Nested with parallel blocks are disallowed, because they wouldn't
allow you to propagate lastprivates or reductions:
#pragma omp parallel for lastprivate(i)
for i in prange(n):
sum = 0
#pragma omp parallel private(j, sum)
with parallel:
#pragma omp parallel
with parallel:
#pragma omp for lastprivate(j) reduction(+:sum)
for j in prange(n):
sum += i
# sum and j are well-defined here
# sum and j are undefined here
# sum and j are undefined here
"""
self.privates[entry] = (op, lastprivate)
if entry.type.is_memoryviewslice:
error(pos, "Memoryview slices can only be shared in parallel sections")
return
if self.is_prange:
if not self.is_parallel and entry not in self.parent.assignments:
# Parent is a parallel with block
parent = self.parent.parent
else:
parent = self.parent
# We don't need to propagate privates, only reductions and
# lastprivates
if parent and (op or lastprivate):
parent.propagate_var_privatization(entry, pos, op, lastprivate)
def _allocate_closure_temp(self, code, entry):
"""
        Helper function that allocates a temporary for a closure variable that
is assigned to.
"""
if self.parent:
return self.parent._allocate_closure_temp(code, entry)
if entry.cname in self.seen_closure_vars:
return entry.cname
cname = code.funcstate.allocate_temp(entry.type, True)
# Add both the actual cname and the temp cname, as the actual cname
# will be replaced with the temp cname on the entry
self.seen_closure_vars.add(entry.cname)
self.seen_closure_vars.add(cname)
self.modified_entries.append((entry, entry.cname))
code.putln("%s = %s;" % (cname, entry.cname))
entry.cname = cname
def initialize_privates_to_nan(self, code, exclude=None):
first = True
for entry, (op, lastprivate) in self.privates.iteritems():
if not op and (not exclude or entry != exclude):
invalid_value = entry.type.invalid_value()
if invalid_value:
if first:
code.putln("/* Initialize private variables to "
"invalid values */")
code.globalstate.use_utility_code(
invalid_values_utility_code)
first = False
have_invalid_values = True
code.putln("%s = %s;" % (entry.cname,
entry.type.cast_code(invalid_value)))
def put_num_threads(self, code):
"""
        If self.num_threads is set, write it out as the num_threads() OpenMP clause.
"""
if self.num_threads is not None:
c = self.begin_of_parallel_control_block_point
# we need to set the owner to ourselves temporarily, as
# allocate_temp may generate a comment in the middle of our pragma
# otherwise when DebugFlags.debug_temp_code_comments is in effect
owner = c.funcstate.owner
c.funcstate.owner = c
self.num_threads.generate_evaluation_code(c)
c.funcstate.owner = owner
code.put(" num_threads(%s)" % (self.num_threads.result(),))
def declare_closure_privates(self, code):
"""
If a variable is in a scope object, we need to allocate a temp and
assign the value from the temp to the variable in the scope object
after the parallel section. This kind of copying should be done only
in the outermost parallel section.
"""
self.modified_entries = []
for entry, (pos, op) in self.assignments.iteritems():
if entry.from_closure or entry.in_closure:
self._allocate_closure_temp(code, entry)
def release_closure_privates(self, code):
"""
Release any temps used for variables in scope objects. As this is the
outermost parallel block, we don't need to delete the cnames from
self.seen_closure_vars.
"""
for entry, original_cname in self.modified_entries:
code.putln("%s = %s;" % (original_cname, entry.cname))
code.funcstate.release_temp(entry.cname)
entry.cname = original_cname
def privatize_temps(self, code, exclude_temps=()):
"""
Make any used temporaries private. Before the relevant code block
code.start_collecting_temps() should have been called.
"""
if self.is_parallel:
c = self.privatization_insertion_point
temps = code.funcstate.stop_collecting_temps()
privates, firstprivates = [], []
for temp, type in temps:
if type.is_pyobject:
firstprivates.append(temp)
else:
privates.append(temp)
if privates:
c.put(" private(%s)" % ", ".join(privates))
if firstprivates:
c.put(" firstprivate(%s)" % ", ".join(firstprivates))
if self.breaking_label_used:
shared_vars = [Naming.parallel_why]
if self.error_label_used:
shared_vars.extend(self.parallel_exc)
c.put(" private(%s, %s, %s)" % self.pos_info)
c.put(" shared(%s)" % ', '.join(shared_vars))
def setup_parallel_control_flow_block(self, code):
"""
Sets up a block that surrounds the parallel block to determine
how the parallel section was exited. Any kind of return is
trapped (break, continue, return, exceptions). This is the idea:
{
int why = 0;
#pragma omp parallel
{
return # -> goto new_return_label;
goto end_parallel;
new_return_label:
why = 3;
goto end_parallel;
end_parallel:;
#pragma omp flush(why) # we need to flush for every iteration
}
if (why == 3)
goto old_return_label;
}
"""
self.old_loop_labels = code.new_loop_labels()
self.old_error_label = code.new_error_label()
self.old_return_label = code.return_label
code.return_label = code.new_label(name="return")
code.begin_block() # parallel control flow block
self.begin_of_parallel_control_block_point = code.insertion_point()
def begin_parallel_block(self, code):
"""
Each OpenMP thread in a parallel section that contains a with gil block
must have the thread-state initialized. The call to
        PyGILState_Release() then deallocates our threadstate. If we didn't
        do this, each 'with gil' block would allocate and deallocate one, thereby
losing exception information before it can be saved before leaving the
parallel section.
"""
self.begin_of_parallel_block = code.insertion_point()
def end_parallel_block(self, code):
"Acquire the GIL, deallocate threadstate, release"
if self.error_label_used:
begin_code = self.begin_of_parallel_block
end_code = code
begin_code.put_ensure_gil(declare_gilstate=True)
begin_code.putln("Py_BEGIN_ALLOW_THREADS")
end_code.putln("Py_END_ALLOW_THREADS")
end_code.put_release_ensured_gil()
def trap_parallel_exit(self, code, should_flush=False):
"""
Trap any kind of return inside a parallel construct. 'should_flush'
        indicates whether the parallel_why variable should be flushed, which is needed by
prange to skip the loop. It also indicates whether we need to register
a continue (we need this for parallel blocks, but not for prange
loops, as it is a direct jump there).
It uses the same mechanism as try/finally:
1 continue
2 break
3 return
4 error
"""
save_lastprivates_label = code.new_label()
dont_return_label = code.new_label()
insertion_point = code.insertion_point()
self.any_label_used = False
self.breaking_label_used = False
self.error_label_used = False
self.parallel_private_temps = []
all_labels = code.get_all_labels()
# Figure this out before starting to generate any code
for label in all_labels:
if code.label_used(label):
self.breaking_label_used = (self.breaking_label_used or
label != code.continue_label)
self.any_label_used = True
if self.any_label_used:
code.put_goto(dont_return_label)
for i, label in enumerate(all_labels):
if not code.label_used(label):
continue
is_continue_label = label == code.continue_label
code.put_label(label)
if not (should_flush and is_continue_label):
if label == code.error_label:
self.error_label_used = True
self.fetch_parallel_exception(code)
code.putln("%s = %d;" % (Naming.parallel_why, i + 1))
if (self.breaking_label_used and self.is_prange and not
is_continue_label):
code.put_goto(save_lastprivates_label)
else:
code.put_goto(dont_return_label)
if self.any_label_used:
if self.is_prange and self.breaking_label_used:
# Don't rely on lastprivate, save our lastprivates
code.put_label(save_lastprivates_label)
self.save_parallel_vars(code)
code.put_label(dont_return_label)
if should_flush and self.breaking_label_used:
code.putln_openmp("#pragma omp flush(%s)" % Naming.parallel_why)
def save_parallel_vars(self, code):
"""
The following shenanigans are instated when we break, return or
propagate errors from a prange. In this case we cannot rely on
lastprivate() to do its job, as no iterations may have executed yet
in the last thread, leaving the values undefined. It is most likely
that the breaking thread has well-defined values of the lastprivate
variables, so we keep those values.
"""
section_name = ("__pyx_parallel_lastprivates%d" %
self.critical_section_counter)
code.putln_openmp("#pragma omp critical(%s)" % section_name)
ParallelStatNode.critical_section_counter += 1
code.begin_block() # begin critical section
c = self.begin_of_parallel_control_block_point
temp_count = 0
for entry, (op, lastprivate) in self.privates.iteritems():
if not lastprivate or entry.type.is_pyobject:
continue
type_decl = entry.type.declaration_code("")
temp_cname = "__pyx_parallel_temp%d" % temp_count
private_cname = entry.cname
temp_count += 1
# Declare the parallel private in the outer block
c.putln("%s %s;" % (type_decl, temp_cname))
# Initialize before escaping
code.putln("%s = %s;" % (temp_cname, private_cname))
self.parallel_private_temps.append((temp_cname, private_cname))
code.end_block() # end critical section
def fetch_parallel_exception(self, code):
"""
As each OpenMP thread may raise an exception, we need to fetch that
exception from the threadstate and save it for after the parallel
section where it can be re-raised in the master thread.
Although it would seem that __pyx_filename, __pyx_lineno and
__pyx_clineno are only assigned to under exception conditions (i.e.,
when we have the GIL), and thus should be allowed to be shared without
any race condition, they are in fact subject to the same race
conditions that they were previously when they were global variables
and functions were allowed to release the GIL:
thread A thread B
acquire
set lineno
release
acquire
set lineno
release
acquire
fetch exception
release
skip the fetch
deallocate threadstate deallocate threadstate
"""
code.begin_block()
code.put_ensure_gil(declare_gilstate=True)
code.putln_openmp("#pragma omp flush(%s)" % Naming.parallel_exc_type)
code.putln(
"if (!%s) {" % Naming.parallel_exc_type)
code.putln("__Pyx_ErrFetch(&%s, &%s, &%s);" % self.parallel_exc)
pos_info = chain(*zip(self.parallel_pos_info, self.pos_info))
code.putln("%s = %s; %s = %s; %s = %s;" % tuple(pos_info))
code.putln('__Pyx_GOTREF(%s);' % Naming.parallel_exc_type)
code.putln(
"}")
code.put_release_ensured_gil()
code.end_block()
def restore_parallel_exception(self, code):
"Re-raise a parallel exception"
code.begin_block()
code.put_ensure_gil(declare_gilstate=True)
code.putln("__Pyx_ErrRestore(%s, %s, %s);" % self.parallel_exc)
pos_info = chain(*zip(self.pos_info, self.parallel_pos_info))
code.putln("%s = %s; %s = %s; %s = %s;" % tuple(pos_info))
code.putln("__Pyx_GIVEREF(%s);" % Naming.parallel_exc_type)
code.put_release_ensured_gil()
code.end_block()
def restore_labels(self, code):
"""
        Restore all old labels. Call this before the 'else' clause of for
loops and always before ending the parallel control flow block.
"""
code.set_all_labels(self.old_loop_labels + (self.old_return_label,
self.old_error_label))
def end_parallel_control_flow_block(self, code,
break_=False, continue_=False):
"""
This ends the parallel control flow block and based on how the parallel
section was exited, takes the corresponding action. The break_ and
continue_ parameters indicate whether these should be propagated
outwards:
for i in prange(...):
with cython.parallel.parallel():
continue
        Here the continue should be trapped in the parallel block, and
        propagated to the for loop.
"""
c = self.begin_of_parallel_control_block_point
# Firstly, always prefer errors over returning, continue or break
if self.error_label_used:
c.putln("const char *%s; int %s, %s;" % self.parallel_pos_info)
c.putln("%s = NULL; %s = %s = 0;" % self.parallel_pos_info)
c.putln("PyObject *%s = NULL, *%s = NULL, *%s = NULL;" %
self.parallel_exc)
code.putln(
"if (%s) {" % Naming.parallel_exc_type)
code.putln("/* This may have been overridden by a continue, "
"break or return in another thread. Prefer the error. */")
code.putln("%s = 4;" % Naming.parallel_why)
code.putln(
"}")
if continue_:
any_label_used = self.any_label_used
else:
any_label_used = self.breaking_label_used
if any_label_used:
# __pyx_parallel_why is used, declare and initialize
c.putln("int %s;" % Naming.parallel_why)
c.putln("%s = 0;" % Naming.parallel_why)
code.putln(
"if (%s) {" % Naming.parallel_why)
for temp_cname, private_cname in self.parallel_private_temps:
code.putln("%s = %s;" % (private_cname, temp_cname))
code.putln("switch (%s) {" % Naming.parallel_why)
if continue_:
code.put(" case 1: ")
code.put_goto(code.continue_label)
if break_:
code.put(" case 2: ")
code.put_goto(code.break_label)
code.put(" case 3: ")
code.put_goto(code.return_label)
if self.error_label_used:
code.globalstate.use_utility_code(restore_exception_utility_code)
code.putln(" case 4:")
self.restore_parallel_exception(code)
code.put_goto(code.error_label)
code.putln("}") # end switch
code.putln(
"}") # end if
code.end_block() # end parallel control flow block
class ParallelWithBlockNode(ParallelStatNode):
"""
This node represents a 'with cython.parallel.parallel():' block
"""
valid_keyword_arguments = ['num_threads']
num_threads = None
def analyse_declarations(self, env):
super(ParallelWithBlockNode, self).analyse_declarations(env)
if self.args:
error(self.pos, "cython.parallel.parallel() does not take "
"positional arguments")
def generate_execution_code(self, code):
self.declare_closure_privates(code)
self.setup_parallel_control_flow_block(code)
code.putln("#ifdef _OPENMP")
code.put("#pragma omp parallel ")
if self.privates:
privates = [e.cname for e in self.privates
if not e.type.is_pyobject]
code.put('private(%s)' % ', '.join(privates))
self.privatization_insertion_point = code.insertion_point()
self.put_num_threads(code)
code.putln("")
code.putln("#endif /* _OPENMP */")
code.begin_block() # parallel block
self.begin_parallel_block(code)
self.initialize_privates_to_nan(code)
code.funcstate.start_collecting_temps()
self.body.generate_execution_code(code)
self.trap_parallel_exit(code)
self.privatize_temps(code)
self.end_parallel_block(code)
code.end_block() # end parallel block
continue_ = code.label_used(code.continue_label)
break_ = code.label_used(code.break_label)
self.restore_labels(code)
self.end_parallel_control_flow_block(code, break_=break_,
continue_=continue_)
self.release_closure_privates(code)
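# Illustrative sketch (editor's note, not part of the original source): the
# hypothetical Cython input below is compiled by ParallelWithBlockNode into
# roughly the C skeleton underneath it (cnames and clauses simplified):
#
#     with nogil, parallel(num_threads=4):
#         local = 0.0              # assigned here -> becomes private(...)
#         work(&local)
#
#     #ifdef _OPENMP
#     #pragma omp parallel private(__pyx_v_local) num_threads(__pyx_t_1)
#     #endif
#     {
#         /* body, wrapped in the control-flow trapping block set up above */
#     }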
class ParallelRangeNode(ParallelStatNode):
"""
This node represents a 'for i in cython.parallel.prange():' construct.
target NameNode the target iteration variable
else_clause Node or None the else clause of this loop
"""
child_attrs = ['body', 'target', 'else_clause', 'args']
body = target = else_clause = args = None
start = stop = step = None
is_prange = True
nogil = None
schedule = None
num_threads = None
valid_keyword_arguments = ['schedule', 'nogil', 'num_threads']
def __init__(self, pos, **kwds):
super(ParallelRangeNode, self).__init__(pos, **kwds)
# Pretend to be a ForInStatNode for control flow analysis
self.iterator = PassStatNode(pos)
def analyse_declarations(self, env):
super(ParallelRangeNode, self).analyse_declarations(env)
self.target.analyse_target_declaration(env)
if self.else_clause is not None:
self.else_clause.analyse_declarations(env)
if not self.args or len(self.args) > 3:
error(self.pos, "Invalid number of positional arguments to prange")
return
if len(self.args) == 1:
self.stop, = self.args
elif len(self.args) == 2:
self.start, self.stop = self.args
else:
self.start, self.stop, self.step = self.args
if hasattr(self.schedule, 'decode'):
self.schedule = self.schedule.decode('ascii')
if self.schedule not in (None, 'static', 'dynamic', 'guided',
'runtime'):
error(self.pos, "Invalid schedule argument to prange: %s" %
(self.schedule,))
def analyse_expressions(self, env):
if self.nogil:
was_nogil = env.nogil
env.nogil = True
if self.target is None:
error(self.pos, "prange() can only be used as part of a for loop")
return
self.target.analyse_target_types(env)
if not self.target.type.is_numeric:
# Not a valid type, assume one for now anyway
if not self.target.type.is_pyobject:
# nogil_check will catch the is_pyobject case
error(self.target.pos,
"Must be of numeric type, not %s" % self.target.type)
self.index_type = PyrexTypes.c_py_ssize_t_type
else:
self.index_type = self.target.type
# Setup start, stop and step, allocating temps if needed
self.names = 'start', 'stop', 'step'
start_stop_step = self.start, self.stop, self.step
for node, name in zip(start_stop_step, self.names):
if node is not None:
node.analyse_types(env)
if not node.type.is_numeric:
error(node.pos, "%s argument must be numeric" % name)
continue
if not node.is_literal:
node = node.coerce_to_temp(env)
setattr(self, name, node)
# As we range from 0 to nsteps, computing the index along the
# way, we need a fitting type for 'i' and 'nsteps'
self.index_type = PyrexTypes.widest_numeric_type(
self.index_type, node.type)
if self.else_clause is not None:
self.else_clause.analyse_expressions(env)
# Although not actually an assignment in this scope, it should be
        # treated as such to ensure it is unpacked if it is a closure temp, and to
# ensure lastprivate behaviour and propagation. If the target index is
# not a NameNode, it won't have an entry, and an error was issued by
# ParallelRangeTransform
if hasattr(self.target, 'entry'):
self.assignments[self.target.entry] = self.target.pos, None
super(ParallelRangeNode, self).analyse_expressions(env)
if self.nogil:
env.nogil = was_nogil
def nogil_check(self, env):
names = 'start', 'stop', 'step', 'target'
nodes = self.start, self.stop, self.step, self.target
for name, node in zip(names, nodes):
if node is not None and node.type.is_pyobject:
error(node.pos, "%s may not be a Python object "
"as we don't have the GIL" % name)
def generate_execution_code(self, code):
"""
Generate code in the following steps
1) copy any closure variables determined thread-private
into temporaries
2) allocate temps for start, stop and step
3) generate a loop that calculates the total number of steps,
which then computes the target iteration variable for every step:
for i in prange(start, stop, step):
...
becomes
nsteps = (stop - start) / step;
i = start;
#pragma omp parallel for lastprivate(i)
for (temp = 0; temp < nsteps; temp++) {
i = start + step * temp;
...
}
Note that accumulation of 'i' would have a data dependency
between iterations.
Also, you can't do this
for (i = start; i < stop; i += step)
...
as the '<' operator should become '>' for descending loops.
'for i from x < i < y:' does not suffer from this problem
as the relational operator is known at compile time!
4) release our temps and write back any private closure variables
"""
self.declare_closure_privates(code)
# This can only be a NameNode
target_index_cname = self.target.entry.cname
# This will be used as the dict to format our code strings, holding
        # the start, stop, step, temps and target cnames
fmt_dict = {
'target': target_index_cname,
}
# Setup start, stop and step, allocating temps if needed
start_stop_step = self.start, self.stop, self.step
defaults = '0', '0', '1'
for node, name, default in zip(start_stop_step, self.names, defaults):
if node is None:
result = default
elif node.is_literal:
result = node.get_constant_c_result_code()
else:
node.generate_evaluation_code(code)
result = node.result()
fmt_dict[name] = result
fmt_dict['i'] = code.funcstate.allocate_temp(self.index_type, False)
fmt_dict['nsteps'] = code.funcstate.allocate_temp(self.index_type, False)
# TODO: check if the step is 0 and if so, raise an exception in a
# 'with gil' block. For now, just abort
code.putln("if (%(step)s == 0) abort();" % fmt_dict)
self.setup_parallel_control_flow_block(code) # parallel control flow block
self.control_flow_var_code_point = code.insertion_point()
# Note: nsteps is private in an outer scope if present
code.putln("%(nsteps)s = (%(stop)s - %(start)s) / %(step)s;" % fmt_dict)
        # The target iteration variable might not be initialized; initialize it
        # only if we execute at least one iteration, otherwise leave the
        # target unaffected. The target iteration variable is firstprivate to
# shut up compiler warnings caused by lastprivate, as the compiler
# erroneously believes that nsteps may be <= 0, leaving the private
# target index uninitialized
code.putln("if (%(nsteps)s > 0)" % fmt_dict)
code.begin_block() # if block
code.putln("%(target)s = 0;" % fmt_dict)
self.generate_loop(code, fmt_dict)
code.end_block() # end if block
self.restore_labels(code)
if self.else_clause:
if self.breaking_label_used:
code.put("if (%s < 2)" % Naming.parallel_why)
code.begin_block() # else block
code.putln("/* else */")
self.else_clause.generate_execution_code(code)
code.end_block() # end else block
# ------ cleanup ------
self.end_parallel_control_flow_block(code) # end parallel control flow block
# And finally, release our privates and write back any closure
# variables
for temp in start_stop_step:
if temp is not None:
temp.generate_disposal_code(code)
temp.free_temps(code)
code.funcstate.release_temp(fmt_dict['i'])
code.funcstate.release_temp(fmt_dict['nsteps'])
self.release_closure_privates(code)
def generate_loop(self, code, fmt_dict):
code.putln("#ifdef _OPENMP")
if not self.is_parallel:
code.put("#pragma omp for")
self.privatization_insertion_point = code.insertion_point()
reduction_codepoint = self.parent.privatization_insertion_point
else:
code.put("#pragma omp parallel")
self.privatization_insertion_point = code.insertion_point()
reduction_codepoint = self.privatization_insertion_point
code.putln("")
code.putln("#endif /* _OPENMP */")
code.begin_block() # pragma omp parallel begin block
# Initialize the GIL if needed for this thread
self.begin_parallel_block(code)
code.putln("#ifdef _OPENMP")
code.put("#pragma omp for")
for entry, (op, lastprivate) in self.privates.iteritems():
# Don't declare the index variable as a reduction
if op and op in "+*-&^|" and entry != self.target.entry:
if entry.type.is_pyobject:
error(self.pos, "Python objects cannot be reductions")
else:
#code.put(" reduction(%s:%s)" % (op, entry.cname))
# This is the only way reductions + nesting works in gcc4.5
reduction_codepoint.put(
" reduction(%s:%s)" % (op, entry.cname))
else:
if entry == self.target.entry:
code.put(" firstprivate(%s)" % entry.cname)
code.put(" lastprivate(%s)" % entry.cname)
continue
if not entry.type.is_pyobject:
if lastprivate:
private = 'lastprivate'
else:
private = 'private'
code.put(" %s(%s)" % (private, entry.cname))
if self.schedule:
code.put(" schedule(%s)" % self.schedule)
self.put_num_threads(reduction_codepoint)
code.putln("")
code.putln("#endif /* _OPENMP */")
code.put("for (%(i)s = 0; %(i)s < %(nsteps)s; %(i)s++)" % fmt_dict)
code.begin_block() # for loop block
guard_around_body_codepoint = code.insertion_point()
# Start if guard block around the body. This may be unnecessary, but
# at least it doesn't spoil indentation
code.begin_block()
code.putln("%(target)s = %(start)s + %(step)s * %(i)s;" % fmt_dict)
self.initialize_privates_to_nan(code, exclude=self.target.entry)
if self.is_parallel:
code.funcstate.start_collecting_temps()
self.body.generate_execution_code(code)
self.trap_parallel_exit(code, should_flush=True)
self.privatize_temps(code)
if self.breaking_label_used:
# Put a guard around the loop body in case return, break or
# exceptions might be used
guard_around_body_codepoint.putln("if (%s < 2)" % Naming.parallel_why)
code.end_block() # end guard around loop body
code.end_block() # end for loop block
if self.is_parallel:
# Release the GIL and deallocate the thread state
self.end_parallel_block(code)
code.end_block() # pragma omp parallel end block
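# Illustrative sketch (editor's note, not part of the original source): a
# reduction prange such as the hypothetical loop below is turned into the
# OpenMP skeleton underneath it by generate_execution_code()/generate_loop()
# (cnames and clauses simplified):
#
#     for i in prange(n, nogil=True, schedule='static'):
#         total += f(i)            # f is a hypothetical nogil C function
#
#     __pyx_t_2 = (__pyx_v_n - 0) / 1;                /* nsteps */
#     if (__pyx_t_2 > 0) {
#         __pyx_v_i = 0;
#         #pragma omp parallel reduction(+:__pyx_v_total)
#         {
#             #pragma omp for firstprivate(__pyx_v_i) lastprivate(__pyx_v_i) schedule(static)
#             for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3++) {
#                 __pyx_v_i = 0 + 1 * __pyx_t_3;
#                 /* loop body */
#             }
#         }
#     }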
class CnameDecoratorNode(StatNode):
"""
This node is for the cname decorator in CythonUtilityCode:
@cname('the_cname')
cdef func(...):
...
In case of a cdef class the cname specifies the objstruct_cname.
node the node to which the cname decorator is applied
cname the cname the node should get
"""
child_attrs = ['node']
def analyse_declarations(self, env):
self.node.analyse_declarations(env)
self.is_function = isinstance(self.node, FuncDefNode)
is_struct_or_enum = isinstance(self.node, (CStructOrUnionDefNode,
CEnumDefNode))
e = self.node.entry
if self.is_function:
e.cname = self.cname
e.func_cname = self.cname
elif is_struct_or_enum:
e.cname = e.type.cname = self.cname
else:
scope = self.node.scope
e.cname = self.cname
e.type.objstruct_cname = self.cname + '_obj'
e.type.typeobj_cname = Naming.typeobj_prefix + self.cname
e.type.typeptr_cname = self.cname + '_type'
e.as_variable.cname = py_object_type.cast_code(e.type.typeptr_cname)
scope.scope_prefix = self.cname + "_"
for name, entry in scope.entries.iteritems():
if entry.func_cname:
cname = entry.cname
if '.' in cname:
# remove __pyx_base from func_cname
cname = cname.split('.')[-1]
entry.func_cname = '%s_%s' % (self.cname, cname)
def analyse_expressions(self, env):
self.node.analyse_expressions(env)
def generate_function_definitions(self, env, code):
"Ensure a prototype for every @cname method in the right place"
if self.is_function and env.is_c_class_scope:
# method in cdef class, generate a prototype in the header
h_code = code.globalstate['utility_code_proto']
if isinstance(self.node, DefNode):
self.node.generate_function_header(
h_code, with_pymethdef=False, proto_only=True)
else:
import ModuleNode
entry = self.node.entry
cname = entry.cname
entry.cname = entry.func_cname
ModuleNode.generate_cfunction_declaration(
entry,
env.global_scope(),
h_code,
definition=True)
entry.cname = cname
self.node.generate_function_definitions(env, code)
def generate_execution_code(self, code):
self.node.generate_execution_code(code)
#------------------------------------------------------------------------------------
#
# Runtime support code
#
#------------------------------------------------------------------------------------
utility_function_predeclarations = \
"""
/* inline attribute */
#ifndef CYTHON_INLINE
#if defined(__GNUC__)
#define CYTHON_INLINE __inline__
#elif defined(_MSC_VER)
#define CYTHON_INLINE __inline
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define CYTHON_INLINE inline
#else
#define CYTHON_INLINE
#endif
#endif
/* unused attribute */
#ifndef CYTHON_UNUSED
# if defined(__GNUC__)
# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
#endif
typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/
"""
if Options.gcc_branch_hints:
branch_prediction_macros = \
"""
#ifdef __GNUC__
/* Test for GCC > 2.95 */
#if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#else /* __GNUC__ > 2 ... */
#define likely(x) (x)
#define unlikely(x) (x)
#endif /* __GNUC__ > 2 ... */
#else /* __GNUC__ */
#define likely(x) (x)
#define unlikely(x) (x)
#endif /* __GNUC__ */
"""
else:
branch_prediction_macros = \
"""
#define likely(x) (x)
#define unlikely(x) (x)
"""
#get_name_predeclaration = \
#"static PyObject *__Pyx_GetName(PyObject *dict, char *name); /*proto*/"
#get_name_interned_predeclaration = \
#"static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/"
#------------------------------------------------------------------------------------
printing_utility_code = UtilityCode(
proto = """
static int __Pyx_Print(PyObject*, PyObject *, int); /*proto*/
#if PY_MAJOR_VERSION >= 3
static PyObject* %s = 0;
static PyObject* %s = 0;
#endif
""" % (Naming.print_function, Naming.print_function_kwargs),
cleanup = """
#if PY_MAJOR_VERSION >= 3
Py_CLEAR(%s);
Py_CLEAR(%s);
#endif
""" % (Naming.print_function, Naming.print_function_kwargs),
impl = r"""
#if PY_MAJOR_VERSION < 3
static PyObject *__Pyx_GetStdout(void) {
PyObject *f = PySys_GetObject((char *)"stdout");
if (!f) {
PyErr_SetString(PyExc_RuntimeError, "lost sys.stdout");
}
return f;
}
static int __Pyx_Print(PyObject* f, PyObject *arg_tuple, int newline) {
PyObject* v;
int i;
if (!f) {
if (!(f = __Pyx_GetStdout()))
return -1;
}
for (i=0; i < PyTuple_GET_SIZE(arg_tuple); i++) {
if (PyFile_SoftSpace(f, 1)) {
if (PyFile_WriteString(" ", f) < 0)
return -1;
}
v = PyTuple_GET_ITEM(arg_tuple, i);
if (PyFile_WriteObject(v, f, Py_PRINT_RAW) < 0)
return -1;
if (PyString_Check(v)) {
char *s = PyString_AsString(v);
Py_ssize_t len = PyString_Size(v);
if (len > 0 &&
isspace(Py_CHARMASK(s[len-1])) &&
s[len-1] != ' ')
PyFile_SoftSpace(f, 0);
}
}
if (newline) {
if (PyFile_WriteString("\n", f) < 0)
return -1;
PyFile_SoftSpace(f, 0);
}
return 0;
}
#else /* Python 3 has a print function */
static int __Pyx_Print(PyObject* stream, PyObject *arg_tuple, int newline) {
PyObject* kwargs = 0;
PyObject* result = 0;
PyObject* end_string;
if (unlikely(!%(PRINT_FUNCTION)s)) {
%(PRINT_FUNCTION)s = __Pyx_GetAttrString(%(BUILTINS)s, "print");
if (!%(PRINT_FUNCTION)s)
return -1;
}
if (stream) {
kwargs = PyDict_New();
if (unlikely(!kwargs))
return -1;
if (unlikely(PyDict_SetItemString(kwargs, "file", stream) < 0))
goto bad;
if (!newline) {
end_string = PyUnicode_FromStringAndSize(" ", 1);
if (unlikely(!end_string))
goto bad;
if (PyDict_SetItemString(kwargs, "end", end_string) < 0) {
Py_DECREF(end_string);
goto bad;
}
Py_DECREF(end_string);
}
} else if (!newline) {
if (unlikely(!%(PRINT_KWARGS)s)) {
%(PRINT_KWARGS)s = PyDict_New();
if (unlikely(!%(PRINT_KWARGS)s))
return -1;
end_string = PyUnicode_FromStringAndSize(" ", 1);
if (unlikely(!end_string))
return -1;
if (PyDict_SetItemString(%(PRINT_KWARGS)s, "end", end_string) < 0) {
Py_DECREF(end_string);
return -1;
}
Py_DECREF(end_string);
}
kwargs = %(PRINT_KWARGS)s;
}
result = PyObject_Call(%(PRINT_FUNCTION)s, arg_tuple, kwargs);
if (unlikely(kwargs) && (kwargs != %(PRINT_KWARGS)s))
Py_DECREF(kwargs);
if (!result)
return -1;
Py_DECREF(result);
return 0;
bad:
if (kwargs != %(PRINT_KWARGS)s)
Py_XDECREF(kwargs);
return -1;
}
#endif
""" % {'BUILTINS' : Naming.builtins_cname,
'PRINT_FUNCTION' : Naming.print_function,
'PRINT_KWARGS' : Naming.print_function_kwargs}
)
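# Illustrative sketch (editor's note, not part of the original source): a
# Python-level 'print a, b' statement is compiled into a call of the helper
# above, roughly (temporaries simplified):
#
#     __pyx_t_1 = PyTuple_New(2);  /* pack the printed values */
#     ...
#     if (__Pyx_Print(0, __pyx_t_1, 1) < 0) goto __pyx_L1_error;
#
# where the first argument is an optional stream (0 selects sys.stdout) and
# the final flag requests a trailing newline.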
printing_one_utility_code = UtilityCode(
proto = """
static int __Pyx_PrintOne(PyObject* stream, PyObject *o); /*proto*/
""",
impl = r"""
#if PY_MAJOR_VERSION < 3
static int __Pyx_PrintOne(PyObject* f, PyObject *o) {
if (!f) {
if (!(f = __Pyx_GetStdout()))
return -1;
}
if (PyFile_SoftSpace(f, 0)) {
if (PyFile_WriteString(" ", f) < 0)
return -1;
}
if (PyFile_WriteObject(o, f, Py_PRINT_RAW) < 0)
return -1;
if (PyFile_WriteString("\n", f) < 0)
return -1;
return 0;
    /* the line below is just to avoid compiler
     * warnings about unused functions */
return __Pyx_Print(f, NULL, 0);
}
#else /* Python 3 has a print function */
static int __Pyx_PrintOne(PyObject* stream, PyObject *o) {
int res;
PyObject* arg_tuple = PyTuple_New(1);
if (unlikely(!arg_tuple))
return -1;
Py_INCREF(o);
PyTuple_SET_ITEM(arg_tuple, 0, o);
res = __Pyx_Print(stream, arg_tuple, 1);
Py_DECREF(arg_tuple);
return res;
}
#endif
""",
requires=[printing_utility_code])
#------------------------------------------------------------------------------------
# Exception raising code
#
# Exceptions are raised by __Pyx_Raise() and stored as plain
# type/value/tb in PyThreadState->curexc_*. When being caught by an
# 'except' statement, curexc_* is moved over to exc_* by
# __Pyx_GetException()
restore_exception_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
""",
impl = """
static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
PyThreadState *tstate = PyThreadState_GET();
tmp_type = tstate->curexc_type;
tmp_value = tstate->curexc_value;
tmp_tb = tstate->curexc_traceback;
tstate->curexc_type = type;
tstate->curexc_value = value;
tstate->curexc_traceback = tb;
Py_XDECREF(tmp_type);
Py_XDECREF(tmp_value);
Py_XDECREF(tmp_tb);
}
static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) {
PyThreadState *tstate = PyThreadState_GET();
*type = tstate->curexc_type;
*value = tstate->curexc_value;
*tb = tstate->curexc_traceback;
tstate->curexc_type = 0;
tstate->curexc_value = 0;
tstate->curexc_traceback = 0;
}
""")
# The following function is based on do_raise() from ceval.c. There
# are separate versions for Python2 and Python3 as exception handling
# has changed quite a lot between the two versions.
raise_utility_code = UtilityCode(
proto = """
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/
""",
impl = """
#if PY_MAJOR_VERSION < 3
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
/* cause is unused */
Py_XINCREF(type);
Py_XINCREF(value);
Py_XINCREF(tb);
/* First, check the traceback argument, replacing None with NULL. */
if (tb == Py_None) {
Py_DECREF(tb);
tb = 0;
}
else if (tb != NULL && !PyTraceBack_Check(tb)) {
PyErr_SetString(PyExc_TypeError,
"raise: arg 3 must be a traceback or None");
goto raise_error;
}
/* Next, replace a missing value with None */
if (value == NULL) {
value = Py_None;
Py_INCREF(value);
}
#if PY_VERSION_HEX < 0x02050000
if (!PyClass_Check(type))
#else
if (!PyType_Check(type))
#endif
{
/* Raising an instance. The value should be a dummy. */
if (value != Py_None) {
PyErr_SetString(PyExc_TypeError,
"instance exception may not have a separate value");
goto raise_error;
}
/* Normalize to raise <class>, <instance> */
Py_DECREF(value);
value = type;
#if PY_VERSION_HEX < 0x02050000
if (PyInstance_Check(type)) {
type = (PyObject*) ((PyInstanceObject*)type)->in_class;
Py_INCREF(type);
}
else {
type = 0;
PyErr_SetString(PyExc_TypeError,
"raise: exception must be an old-style class or instance");
goto raise_error;
}
#else
type = (PyObject*) Py_TYPE(type);
Py_INCREF(type);
if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
PyErr_SetString(PyExc_TypeError,
"raise: exception class must be a subclass of BaseException");
goto raise_error;
}
#endif
}
__Pyx_ErrRestore(type, value, tb);
return;
raise_error:
Py_XDECREF(value);
Py_XDECREF(type);
Py_XDECREF(tb);
return;
}
#else /* Python 3+ */
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
if (tb == Py_None) {
tb = 0;
} else if (tb && !PyTraceBack_Check(tb)) {
PyErr_SetString(PyExc_TypeError,
"raise: arg 3 must be a traceback or None");
goto bad;
}
if (value == Py_None)
value = 0;
if (PyExceptionInstance_Check(type)) {
if (value) {
PyErr_SetString(PyExc_TypeError,
"instance exception may not have a separate value");
goto bad;
}
value = type;
type = (PyObject*) Py_TYPE(value);
} else if (!PyExceptionClass_Check(type)) {
PyErr_SetString(PyExc_TypeError,
"raise: exception class must be a subclass of BaseException");
goto bad;
}
if (cause) {
PyObject *fixed_cause;
if (PyExceptionClass_Check(cause)) {
fixed_cause = PyObject_CallObject(cause, NULL);
if (fixed_cause == NULL)
goto bad;
}
else if (PyExceptionInstance_Check(cause)) {
fixed_cause = cause;
Py_INCREF(fixed_cause);
}
else {
PyErr_SetString(PyExc_TypeError,
"exception causes must derive from "
"BaseException");
goto bad;
}
if (!value) {
value = PyObject_CallObject(type, NULL);
}
PyException_SetCause(value, fixed_cause);
}
PyErr_SetObject(type, value);
if (tb) {
PyThreadState *tstate = PyThreadState_GET();
PyObject* tmp_tb = tstate->curexc_traceback;
if (tb != tmp_tb) {
Py_INCREF(tb);
tstate->curexc_traceback = tb;
Py_XDECREF(tmp_tb);
}
}
bad:
return;
}
#endif
""",
requires=[restore_exception_utility_code])
#------------------------------------------------------------------------------------
get_exception_utility_code = UtilityCode(
proto = """
static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
""",
impl = """
static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) {
PyObject *local_type, *local_value, *local_tb;
PyObject *tmp_type, *tmp_value, *tmp_tb;
PyThreadState *tstate = PyThreadState_GET();
local_type = tstate->curexc_type;
local_value = tstate->curexc_value;
local_tb = tstate->curexc_traceback;
tstate->curexc_type = 0;
tstate->curexc_value = 0;
tstate->curexc_traceback = 0;
PyErr_NormalizeException(&local_type, &local_value, &local_tb);
if (unlikely(tstate->curexc_type))
goto bad;
#if PY_MAJOR_VERSION >= 3
if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0))
goto bad;
#endif
*type = local_type;
*value = local_value;
*tb = local_tb;
Py_INCREF(local_type);
Py_INCREF(local_value);
Py_INCREF(local_tb);
tmp_type = tstate->exc_type;
tmp_value = tstate->exc_value;
tmp_tb = tstate->exc_traceback;
tstate->exc_type = local_type;
tstate->exc_value = local_value;
tstate->exc_traceback = local_tb;
/* Make sure tstate is in a consistent state when we XDECREF
these objects (XDECREF may run arbitrary code). */
Py_XDECREF(tmp_type);
Py_XDECREF(tmp_value);
Py_XDECREF(tmp_tb);
return 0;
bad:
*type = 0;
*value = 0;
*tb = 0;
Py_XDECREF(local_type);
Py_XDECREF(local_value);
Py_XDECREF(local_tb);
return -1;
}
""")
#------------------------------------------------------------------------------------
get_exception_tuple_utility_code = UtilityCode(proto="""
static PyObject *__Pyx_GetExceptionTuple(void); /*proto*/
""",
# I doubt that calling __Pyx_GetException() here is correct as it moves
# the exception from tstate->curexc_* to tstate->exc_*, which prevents
# exception handlers later on from receiving it.
impl = """
static PyObject *__Pyx_GetExceptionTuple(void) {
PyObject *type = NULL, *value = NULL, *tb = NULL;
if (__Pyx_GetException(&type, &value, &tb) == 0) {
PyObject* exc_info = PyTuple_New(3);
if (exc_info) {
Py_INCREF(type);
Py_INCREF(value);
Py_INCREF(tb);
PyTuple_SET_ITEM(exc_info, 0, type);
PyTuple_SET_ITEM(exc_info, 1, value);
PyTuple_SET_ITEM(exc_info, 2, tb);
return exc_info;
}
}
return NULL;
}
""",
requires=[get_exception_utility_code])
#------------------------------------------------------------------------------------
reset_exception_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_ExceptionSave(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
static void __Pyx_ExceptionReset(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
""",
impl = """
static CYTHON_INLINE void __Pyx_ExceptionSave(PyObject **type, PyObject **value, PyObject **tb) {
PyThreadState *tstate = PyThreadState_GET();
*type = tstate->exc_type;
*value = tstate->exc_value;
*tb = tstate->exc_traceback;
Py_XINCREF(*type);
Py_XINCREF(*value);
Py_XINCREF(*tb);
}
static void __Pyx_ExceptionReset(PyObject *type, PyObject *value, PyObject *tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
PyThreadState *tstate = PyThreadState_GET();
tmp_type = tstate->exc_type;
tmp_value = tstate->exc_value;
tmp_tb = tstate->exc_traceback;
tstate->exc_type = type;
tstate->exc_value = value;
tstate->exc_traceback = tb;
Py_XDECREF(tmp_type);
Py_XDECREF(tmp_value);
Py_XDECREF(tmp_tb);
}
""")
#------------------------------------------------------------------------------------
swap_exception_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
""",
impl = """
static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
PyThreadState *tstate = PyThreadState_GET();
tmp_type = tstate->exc_type;
tmp_value = tstate->exc_value;
tmp_tb = tstate->exc_traceback;
tstate->exc_type = *type;
tstate->exc_value = *value;
tstate->exc_traceback = *tb;
*type = tmp_type;
*value = tmp_value;
*tb = tmp_tb;
}
""")
#------------------------------------------------------------------------------------
arg_type_test_utility_code = UtilityCode(
proto = """
static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed,
const char *name, int exact); /*proto*/
""",
impl = """
static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed,
const char *name, int exact)
{
if (!type) {
PyErr_Format(PyExc_SystemError, "Missing type object");
return 0;
}
if (none_allowed && obj == Py_None) return 1;
else if (exact) {
if (Py_TYPE(obj) == type) return 1;
}
else {
if (PyObject_TypeCheck(obj, type)) return 1;
}
PyErr_Format(PyExc_TypeError,
"Argument '%s' has incorrect type (expected %s, got %s)",
name, type->tp_name, Py_TYPE(obj)->tp_name);
return 0;
}
""")
#------------------------------------------------------------------------------------
#
# __Pyx_RaiseArgtupleInvalid raises the correct exception when too
# many or too few positional arguments were found. This handles
# Py_ssize_t formatting correctly.
raise_argtuple_invalid_utility_code = UtilityCode(
proto = """
static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/
""",
impl = """
static void __Pyx_RaiseArgtupleInvalid(
const char* func_name,
int exact,
Py_ssize_t num_min,
Py_ssize_t num_max,
Py_ssize_t num_found)
{
Py_ssize_t num_expected;
const char *more_or_less;
if (num_found < num_min) {
num_expected = num_min;
more_or_less = "at least";
} else {
num_expected = num_max;
more_or_less = "at most";
}
if (exact) {
more_or_less = "exactly";
}
PyErr_Format(PyExc_TypeError,
"%s() takes %s %"PY_FORMAT_SIZE_T"d positional argument%s (%"PY_FORMAT_SIZE_T"d given)",
func_name, more_or_less, num_expected,
(num_expected == 1) ? "" : "s", num_found);
}
""")
raise_keyword_required_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE void __Pyx_RaiseKeywordRequired(const char* func_name, PyObject* kw_name); /*proto*/
""",
impl = """
static CYTHON_INLINE void __Pyx_RaiseKeywordRequired(
const char* func_name,
PyObject* kw_name)
{
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION >= 3
"%s() needs keyword-only argument %U", func_name, kw_name);
#else
"%s() needs keyword-only argument %s", func_name,
PyString_AS_STRING(kw_name));
#endif
}
""")
raise_double_keywords_utility_code = UtilityCode(
proto = """
static void __Pyx_RaiseDoubleKeywordsError(
const char* func_name, PyObject* kw_name); /*proto*/
""",
impl = """
static void __Pyx_RaiseDoubleKeywordsError(
const char* func_name,
PyObject* kw_name)
{
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION >= 3
"%s() got multiple values for keyword argument '%U'", func_name, kw_name);
#else
"%s() got multiple values for keyword argument '%s'", func_name,
PyString_AS_STRING(kw_name));
#endif
}
""")
#------------------------------------------------------------------------------------
#
# __Pyx_CheckKeywordStrings raises an error if non-string keywords
# were passed to a function, or if any keywords were passed to a
# function that does not accept them.
keyword_string_check_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE int __Pyx_CheckKeywordStrings(PyObject *kwdict,
const char* function_name, int kw_allowed); /*proto*/
""",
impl = """
static CYTHON_INLINE int __Pyx_CheckKeywordStrings(
PyObject *kwdict,
const char* function_name,
int kw_allowed)
{
PyObject* key = 0;
Py_ssize_t pos = 0;
while (PyDict_Next(kwdict, &pos, &key, 0)) {
#if PY_MAJOR_VERSION < 3
if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key)))
#else
if (unlikely(!PyUnicode_CheckExact(key)) && unlikely(!PyUnicode_Check(key)))
#endif
goto invalid_keyword_type;
}
if ((!kw_allowed) && unlikely(key))
goto invalid_keyword;
return 1;
invalid_keyword_type:
PyErr_Format(PyExc_TypeError,
"%s() keywords must be strings", function_name);
return 0;
invalid_keyword:
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION < 3
"%s() got an unexpected keyword argument '%s'",
function_name, PyString_AsString(key));
#else
"%s() got an unexpected keyword argument '%U'",
function_name, key);
#endif
return 0;
}
""")
#------------------------------------------------------------------------------------
#
# __Pyx_ParseOptionalKeywords copies the optional/unknown keyword
# arguments from the kwds dict into kwds2. If kwds2 is NULL, unknown
# keywords will raise an invalid keyword error.
#
# Three kinds of errors are checked: 1) non-string keywords, 2)
# unexpected keywords and 3) overlap with positional arguments.
#
# If num_pos_args is greater than 0, it denotes the number of positional
# arguments that were passed and that must therefore not appear
# amongst the keywords as well.
#
# This method does not check for required keyword arguments.
#
parse_keywords_utility_code = UtilityCode(
proto = """
static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[], \
PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, \
const char* function_name); /*proto*/
""",
impl = """
static int __Pyx_ParseOptionalKeywords(
PyObject *kwds,
PyObject **argnames[],
PyObject *kwds2,
PyObject *values[],
Py_ssize_t num_pos_args,
const char* function_name)
{
PyObject *key = 0, *value = 0;
Py_ssize_t pos = 0;
PyObject*** name;
PyObject*** first_kw_arg = argnames + num_pos_args;
while (PyDict_Next(kwds, &pos, &key, &value)) {
name = first_kw_arg;
while (*name && (**name != key)) name++;
if (*name) {
values[name-argnames] = value;
} else {
#if PY_MAJOR_VERSION < 3
if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key))) {
#else
if (unlikely(!PyUnicode_CheckExact(key)) && unlikely(!PyUnicode_Check(key))) {
#endif
goto invalid_keyword_type;
} else {
for (name = first_kw_arg; *name; name++) {
#if PY_MAJOR_VERSION >= 3
if (PyUnicode_GET_SIZE(**name) == PyUnicode_GET_SIZE(key) &&
PyUnicode_Compare(**name, key) == 0) break;
#else
if (PyString_GET_SIZE(**name) == PyString_GET_SIZE(key) &&
_PyString_Eq(**name, key)) break;
#endif
}
if (*name) {
values[name-argnames] = value;
} else {
/* unexpected keyword found */
for (name=argnames; name != first_kw_arg; name++) {
if (**name == key) goto arg_passed_twice;
#if PY_MAJOR_VERSION >= 3
if (PyUnicode_GET_SIZE(**name) == PyUnicode_GET_SIZE(key) &&
PyUnicode_Compare(**name, key) == 0) goto arg_passed_twice;
#else
if (PyString_GET_SIZE(**name) == PyString_GET_SIZE(key) &&
_PyString_Eq(**name, key)) goto arg_passed_twice;
#endif
}
if (kwds2) {
if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
} else {
goto invalid_keyword;
}
}
}
}
}
return 0;
arg_passed_twice:
__Pyx_RaiseDoubleKeywordsError(function_name, **name);
goto bad;
invalid_keyword_type:
PyErr_Format(PyExc_TypeError,
"%s() keywords must be strings", function_name);
goto bad;
invalid_keyword:
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION < 3
"%s() got an unexpected keyword argument '%s'",
function_name, PyString_AsString(key));
#else
"%s() got an unexpected keyword argument '%U'",
function_name, key);
#endif
bad:
return -1;
}
""",
requires=[raise_double_keywords_utility_code])
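# Sketch of a call from generated argument unpacking (names such as __pyx_kwds,
# __pyx_pyargnames and values[] are schematic, not the exact generated names):
#   if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, NULL,
#                                            values, pos_args, "f") < 0)) return NULL;
# Passing NULL for kwds2 makes any unknown keyword raise TypeError instead of
# being collected into a **kwargs dict.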
#------------------------------------------------------------------------------------
traceback_utility_code = UtilityCode(
proto = """
static void __Pyx_AddTraceback(const char *funcname, int %(CLINENO)s,
int %(LINENO)s, const char *%(FILENAME)s); /*proto*/
""" % {
'FILENAME': Naming.filename_cname,
'LINENO': Naming.lineno_cname,
'CLINENO': Naming.clineno_cname,
},
impl = """
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"
static void __Pyx_AddTraceback(const char *funcname, int %(CLINENO)s,
int %(LINENO)s, const char *%(FILENAME)s) {
PyObject *py_srcfile = 0;
PyObject *py_funcname = 0;
PyObject *py_globals = 0;
PyCodeObject *py_code = 0;
PyFrameObject *py_frame = 0;
#if PY_MAJOR_VERSION < 3
py_srcfile = PyString_FromString(%(FILENAME)s);
#else
py_srcfile = PyUnicode_FromString(%(FILENAME)s);
#endif
if (!py_srcfile) goto bad;
if (%(CLINENO)s) {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromFormat( "%%s (%%s:%%d)", funcname, %(CFILENAME)s, %(CLINENO)s);
#else
py_funcname = PyUnicode_FromFormat( "%%s (%%s:%%d)", funcname, %(CFILENAME)s, %(CLINENO)s);
#endif
}
else {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromString(funcname);
#else
py_funcname = PyUnicode_FromString(funcname);
#endif
}
if (!py_funcname) goto bad;
py_globals = PyModule_GetDict(%(GLOBALS)s);
if (!py_globals) goto bad;
py_code = __Pyx_PyCode_New(
0, /*int argcount,*/
0, /*int kwonlyargcount,*/
0, /*int nlocals,*/
0, /*int stacksize,*/
0, /*int flags,*/
%(EMPTY_BYTES)s, /*PyObject *code,*/
%(EMPTY_TUPLE)s, /*PyObject *consts,*/
%(EMPTY_TUPLE)s, /*PyObject *names,*/
%(EMPTY_TUPLE)s, /*PyObject *varnames,*/
%(EMPTY_TUPLE)s, /*PyObject *freevars,*/
%(EMPTY_TUPLE)s, /*PyObject *cellvars,*/
py_srcfile, /*PyObject *filename,*/
py_funcname, /*PyObject *name,*/
%(LINENO)s, /*int firstlineno,*/
%(EMPTY_BYTES)s /*PyObject *lnotab*/
);
if (!py_code) goto bad;
py_frame = PyFrame_New(
PyThreadState_GET(), /*PyThreadState *tstate,*/
py_code, /*PyCodeObject *code,*/
py_globals, /*PyObject *globals,*/
0 /*PyObject *locals*/
);
if (!py_frame) goto bad;
py_frame->f_lineno = %(LINENO)s;
PyTraceBack_Here(py_frame);
bad:
Py_XDECREF(py_srcfile);
Py_XDECREF(py_funcname);
Py_XDECREF(py_code);
Py_XDECREF(py_frame);
}
""" % {
'FILENAME': Naming.filename_cname,
'LINENO': Naming.lineno_cname,
'CFILENAME': Naming.cfilenm_cname,
'CLINENO': Naming.clineno_cname,
'GLOBALS': Naming.module_cname,
'EMPTY_TUPLE' : Naming.empty_tuple,
'EMPTY_BYTES' : Naming.empty_bytes,
})
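# Illustrative use in generated error handling (label and variable names are
# schematic): after jumping to the error label, the generated code calls
#   __Pyx_AddTraceback("mymodule.myfunc", __pyx_clineno, __pyx_lineno, __pyx_filename);
# which appends a synthetic Python frame for "myfunc" to the traceback being raised.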
#------------------------------------------------------------------------------------
unraisable_exception_utility_code = UtilityCode(
proto = """
static void __Pyx_WriteUnraisable(const char *name, int clineno,
int lineno, const char *filename); /*proto*/
""",
impl = """
static void __Pyx_WriteUnraisable(const char *name, int clineno,
int lineno, const char *filename) {
PyObject *old_exc, *old_val, *old_tb;
PyObject *ctx;
__Pyx_ErrFetch(&old_exc, &old_val, &old_tb);
#if PY_MAJOR_VERSION < 3
ctx = PyString_FromString(name);
#else
ctx = PyUnicode_FromString(name);
#endif
__Pyx_ErrRestore(old_exc, old_val, old_tb);
if (!ctx) {
PyErr_WriteUnraisable(Py_None);
} else {
PyErr_WriteUnraisable(ctx);
Py_DECREF(ctx);
}
}
""",
requires=[restore_exception_utility_code])
#------------------------------------------------------------------------------------
set_vtable_utility_code = UtilityCode(
proto = """
static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/
""",
impl = """
static int __Pyx_SetVtable(PyObject *dict, void *vtable) {
#if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION==3&&PY_MINOR_VERSION==0)
PyObject *ob = PyCapsule_New(vtable, 0, 0);
#else
PyObject *ob = PyCObject_FromVoidPtr(vtable, 0);
#endif
if (!ob)
goto bad;
if (PyDict_SetItemString(dict, "__pyx_vtable__", ob) < 0)
goto bad;
Py_DECREF(ob);
return 0;
bad:
Py_XDECREF(ob);
return -1;
}
""")
#------------------------------------------------------------------------------------
get_vtable_utility_code = UtilityCode(
proto = """
static void* __Pyx_GetVtable(PyObject *dict); /*proto*/
""",
impl = r"""
static void* __Pyx_GetVtable(PyObject *dict) {
void* ptr;
PyObject *ob = PyMapping_GetItemString(dict, (char *)"__pyx_vtable__");
if (!ob)
goto bad;
#if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION==3&&PY_MINOR_VERSION==0)
ptr = PyCapsule_GetPointer(ob, 0);
#else
ptr = PyCObject_AsVoidPtr(ob);
#endif
if (!ptr && !PyErr_Occurred())
PyErr_SetString(PyExc_RuntimeError, "invalid vtable found for imported type");
Py_DECREF(ob);
return ptr;
bad:
Py_XDECREF(ob);
return NULL;
}
""")
#------------------------------------------------------------------------------------
init_string_tab_utility_code = UtilityCode(
proto = """
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
""",
impl = """
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
while (t->p) {
#if PY_MAJOR_VERSION < 3
if (t->is_unicode) {
*t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
} else if (t->intern) {
*t->p = PyString_InternFromString(t->s);
} else {
*t->p = PyString_FromStringAndSize(t->s, t->n - 1);
}
#else /* Python 3+ has unicode identifiers */
if (t->is_unicode | t->is_str) {
if (t->intern) {
*t->p = PyUnicode_InternFromString(t->s);
} else if (t->encoding) {
*t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
} else {
*t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
}
} else {
*t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
}
#endif
if (!*t->p)
return -1;
++t;
}
return 0;
}
""")
#------------------------------------------------------------------------------------
force_init_threads_utility_code = UtilityCode(
proto="""
#ifndef __PYX_FORCE_INIT_THREADS
#define __PYX_FORCE_INIT_THREADS 0
#endif
""")
init_threads = UtilityCode(
init="PyEval_InitThreads();\n",
)
#------------------------------------------------------------------------------------
# Note that cPython ignores PyTrace_EXCEPTION,
# but maybe some other profilers don't.
profile_utility_code = UtilityCode(proto="""
#ifndef CYTHON_PROFILE
#define CYTHON_PROFILE 1
#endif
#ifndef CYTHON_PROFILE_REUSE_FRAME
#define CYTHON_PROFILE_REUSE_FRAME 0
#endif
#if CYTHON_PROFILE
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"
#if CYTHON_PROFILE_REUSE_FRAME
#define CYTHON_FRAME_MODIFIER static
#define CYTHON_FRAME_DEL
#else
#define CYTHON_FRAME_MODIFIER
#define CYTHON_FRAME_DEL Py_DECREF(%(FRAME)s)
#endif
#define __Pyx_TraceDeclarations \\
static PyCodeObject *%(FRAME_CODE)s = NULL; \\
CYTHON_FRAME_MODIFIER PyFrameObject *%(FRAME)s = NULL; \\
int __Pyx_use_tracing = 0;
#define __Pyx_TraceCall(funcname, srcfile, firstlineno) \\
if (unlikely(PyThreadState_GET()->use_tracing && PyThreadState_GET()->c_profilefunc)) { \\
__Pyx_use_tracing = __Pyx_TraceSetupAndCall(&%(FRAME_CODE)s, &%(FRAME)s, funcname, srcfile, firstlineno); \\
}
#define __Pyx_TraceException() \\
    if (unlikely(__Pyx_use_tracing) && PyThreadState_GET()->use_tracing && PyThreadState_GET()->c_profilefunc) { \\
PyObject *exc_info = __Pyx_GetExceptionTuple(); \\
if (exc_info) { \\
PyThreadState_GET()->c_profilefunc( \\
PyThreadState_GET()->c_profileobj, %(FRAME)s, PyTrace_EXCEPTION, exc_info); \\
Py_DECREF(exc_info); \\
} \\
}
#define __Pyx_TraceReturn(result) \\
if (unlikely(__Pyx_use_tracing) && PyThreadState_GET()->use_tracing && PyThreadState_GET()->c_profilefunc) { \\
PyThreadState_GET()->c_profilefunc( \\
PyThreadState_GET()->c_profileobj, %(FRAME)s, PyTrace_RETURN, (PyObject*)result); \\
CYTHON_FRAME_DEL; \\
}
static PyCodeObject *__Pyx_createFrameCodeObject(const char *funcname, const char *srcfile, int firstlineno); /*proto*/
static int __Pyx_TraceSetupAndCall(PyCodeObject** code, PyFrameObject** frame, const char *funcname, const char *srcfile, int firstlineno); /*proto*/
#else
#define __Pyx_TraceDeclarations
#define __Pyx_TraceCall(funcname, srcfile, firstlineno)
#define __Pyx_TraceException()
#define __Pyx_TraceReturn(result)
#endif /* CYTHON_PROFILE */
"""
% {
"FRAME": Naming.frame_cname,
"FRAME_CODE": Naming.frame_code_cname,
},
impl = """
#if CYTHON_PROFILE
static int __Pyx_TraceSetupAndCall(PyCodeObject** code,
PyFrameObject** frame,
const char *funcname,
const char *srcfile,
int firstlineno) {
if (*frame == NULL || !CYTHON_PROFILE_REUSE_FRAME) {
if (*code == NULL) {
*code = __Pyx_createFrameCodeObject(funcname, srcfile, firstlineno);
if (*code == NULL) return 0;
}
*frame = PyFrame_New(
PyThreadState_GET(), /*PyThreadState *tstate*/
*code, /*PyCodeObject *code*/
PyModule_GetDict(%(MODULE)s), /*PyObject *globals*/
0 /*PyObject *locals*/
);
if (*frame == NULL) return 0;
}
else {
(*frame)->f_tstate = PyThreadState_GET();
}
return PyThreadState_GET()->c_profilefunc(PyThreadState_GET()->c_profileobj, *frame, PyTrace_CALL, NULL) == 0;
}
static PyCodeObject *__Pyx_createFrameCodeObject(const char *funcname, const char *srcfile, int firstlineno) {
PyObject *py_srcfile = 0;
PyObject *py_funcname = 0;
PyCodeObject *py_code = 0;
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromString(funcname);
py_srcfile = PyString_FromString(srcfile);
#else
py_funcname = PyUnicode_FromString(funcname);
py_srcfile = PyUnicode_FromString(srcfile);
#endif
if (!py_funcname | !py_srcfile) goto bad;
py_code = PyCode_New(
0, /*int argcount,*/
#if PY_MAJOR_VERSION >= 3
0, /*int kwonlyargcount,*/
#endif
0, /*int nlocals,*/
0, /*int stacksize,*/
0, /*int flags,*/
%(EMPTY_BYTES)s, /*PyObject *code,*/
%(EMPTY_TUPLE)s, /*PyObject *consts,*/
%(EMPTY_TUPLE)s, /*PyObject *names,*/
%(EMPTY_TUPLE)s, /*PyObject *varnames,*/
%(EMPTY_TUPLE)s, /*PyObject *freevars,*/
%(EMPTY_TUPLE)s, /*PyObject *cellvars,*/
py_srcfile, /*PyObject *filename,*/
py_funcname, /*PyObject *name,*/
firstlineno, /*int firstlineno,*/
%(EMPTY_BYTES)s /*PyObject *lnotab*/
);
bad:
Py_XDECREF(py_srcfile);
Py_XDECREF(py_funcname);
return py_code;
}
#endif /* CYTHON_PROFILE */
""" % {
'EMPTY_TUPLE' : Naming.empty_tuple,
'EMPTY_BYTES' : Naming.empty_bytes,
"MODULE": Naming.module_cname,
})
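# Sketch of how the tracing macros are expected to appear in a profiled function
# body (the function name and the __pyx_* variables are illustrative):
#   __Pyx_TraceDeclarations
#   __Pyx_TraceCall("myfunc", __pyx_f[0], __pyx_lineno);
#   /* ... function body ... */
#   __Pyx_TraceReturn(__pyx_r);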
################ Utility code for cython.parallel stuff ################
invalid_values_utility_code = UtilityCode(
proto="""\
#include <string.h>
void __pyx_init_nan(void);
static float %(PYX_NAN)s;
""" % vars(Naming),
init="""
/* Initialize NaN. The sign is irrelevant, an exponent with all bits 1 and
a nonzero mantissa means NaN. If the first bit in the mantissa is 1, it is
a quiet NaN. */
memset(&%(PYX_NAN)s, 0xFF, sizeof(%(PYX_NAN)s));
""" % vars(Naming))
#------------------------------------------------------------------------------------
raise_import_error_utility_code = UtilityCode(
proto = '''
static CYTHON_INLINE void __Pyx_RaiseImportError(PyObject *name);
''',
impl = '''
static CYTHON_INLINE void __Pyx_RaiseImportError(PyObject *name) {
#if PY_MAJOR_VERSION < 3
PyErr_Format(PyExc_ImportError, "cannot import name %.230s",
PyString_AsString(name));
#else
PyErr_Format(PyExc_ImportError, "cannot import name %S", name);
#endif
}
''')<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from . import test_get_weight |
<|file_name|>drf.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class EnumField(serializers.ChoiceField):
default_error_messages = {"invalid_choice": _('"{input}" is not a valid choice.')}
def __init__(self, enum, **kwargs):
self.enum = enum
choices = (
(self.get_choice_value(enum_value), enum_value.label)
for _, enum_value in enum.choices()
)
super(EnumField, self).__init__(choices, **kwargs)
def get_choice_value(self, enum_value):
return enum_value.value
def to_internal_value(self, data):
if isinstance(data, six.string_types) and data.isdigit():
data = int(data)
try:
value = self.enum.get(data).value
except AttributeError: # .get() returned None
if not self.required:
raise serializers.SkipField()
self.fail("invalid_choice", input=data)
return value
def to_representation(self, value):
enum_value = self.enum.get(value)
if enum_value is not None:
return self.get_choice_value(enum_value)
class NamedEnumField(EnumField):
def get_choice_value(self, enum_value):
return enum_value.name
class Meta:
swagger_schema_fields = {"type": "string"}<|fim▁end|> | import six |
<|file_name|>pair_slices.rs<|end_file_name|><|fim▁begin|>use core::cmp::{self};
use core::mem::replace;
use crate::alloc::Allocator;
use super::VecDeque;
/// PairSlices pairs up equal length slice parts of two deques
///
/// For example, given deques "A" and "B" with the following division into slices:
///
/// A: [0 1 2] [3 4 5]
/// B: [a b] [c d e]
///
/// It produces the following sequence of matching slices:
///
/// ([0 1], [a b])
/// (\[2\], \[c\])
/// ([3 4], [d e])
///
/// and the uneven remainder of either A or B is skipped.
pub struct PairSlices<'a, 'b, T> {
a0: &'a mut [T],
a1: &'a mut [T],
b0: &'b [T],
b1: &'b [T],
}
impl<'a, 'b, T> PairSlices<'a, 'b, T> {
pub fn from<A: Allocator>(to: &'a mut VecDeque<T, A>, from: &'b VecDeque<T, A>) -> Self {
let (a0, a1) = to.as_mut_slices();
let (b0, b1) = from.as_slices();
PairSlices { a0, a1, b0, b1 }
}
pub fn has_remainder(&self) -> bool {
!self.b0.is_empty()
}
pub fn remainder(self) -> impl Iterator<Item = &'b [T]> {
IntoIterator::into_iter([self.b0, self.b1])
}<|fim▁hole|>}
impl<'a, 'b, T> Iterator for PairSlices<'a, 'b, T> {
type Item = (&'a mut [T], &'b [T]);
fn next(&mut self) -> Option<Self::Item> {
// Get next part length
let part = cmp::min(self.a0.len(), self.b0.len());
if part == 0 {
return None;
}
let (p0, p1) = replace(&mut self.a0, &mut []).split_at_mut(part);
let (q0, q1) = self.b0.split_at(part);
// Move a1 into a0, if it's empty (and b1, b0 the same way).
self.a0 = p1;
self.b0 = q1;
if self.a0.is_empty() {
self.a0 = replace(&mut self.a1, &mut []);
}
if self.b0.is_empty() {
self.b0 = replace(&mut self.b1, &[]);
}
Some((p0, q0))
}
}<|fim▁end|> | |
<|file_name|>tabs.spec.ts<|end_file_name|><|fim▁begin|>import { TestBed, ComponentFixture } from '@angular/core/testing';
import { Component } from '@angular/core';
import { createGenericTestComponent, selectElements, dispatchFixtureKeyEvent } from '../../../test/util';
import { NglTabsModule } from './module';
import { By } from '@angular/platform-browser';
const createTestComponent = (html?: string, detectChanges?: boolean) =>
createGenericTestComponent(TestComponent, html, detectChanges) as ComponentFixture<TestComponent>;
function getTabsContainer(element: Element): HTMLElement {
return <HTMLElement>element.firstElementChild;
}
function getTabsElement(element: Element): HTMLUListElement {
return <HTMLUListElement>element.querySelector('ul');
}
function getTabHeaders(element: HTMLElement): HTMLElement[] {
return selectElements(element, 'li > a');
}
function getTabContent(element: HTMLElement): string {
return element.querySelector('.slds-tabs_default__content.slds-show').textContent;
}
function expectHeaders(element: HTMLElement, expected: string[]) {
const headers = getTabHeaders(element);
expect(headers.map((h: HTMLElement) => h.innerHTML.replace(/<!--[\s\S]*?-->/g, '').trim())).toEqual(expected);
}
describe('Tabs Component', () => {
beforeEach(() => TestBed.configureTestingModule({declarations: [TestComponent], imports: [NglTabsModule]}));
it('should render the tabs container', () => {
const fixture = createTestComponent();
const host = getTabsContainer(fixture.nativeElement);
const tabs = getTabsElement(host);
expect(host).toHaveCssClass('slds-tabs_default');
expect(tabs.tagName).toBe('UL');
expect(tabs).toHaveCssClass('slds-tabs_default__nav');
});
it('should render the tab headers', () => {
const fixture = createTestComponent();
expectHeaders(fixture.nativeElement, ['First', 'Second', 'Third tab', 'Fourth tab']);
});
it('should have the proper aria attributes for headers and content', () => {
const fixture = createTestComponent();
const items = selectElements(fixture.nativeElement, 'li.slds-tabs_default__item');
const contents = selectElements(fixture.nativeElement, '.slds-tabs_default__content');
expect(items.length).toBe(4);
expect(contents.length).toBe(4);
for (let i = 0; i < 4; i++) {
const item = items[i];
const content = contents[i];
expect(item.getAttribute('aria-controls')).toEqual(content.getAttribute('id'));
expect(content.getAttribute('aria-labelledby')).toEqual(item.getAttribute('id'));
}
});
it('should render tab headers based on template', () => {
const fixture = createTestComponent(`<ngl-tabset [(selected)]="selectedTab">
<ng-template #h><b>My header</b></ng-template>
<ng-template ngl-tab [label]="h"></ng-template>
<ngl-tab label="Simple">
<ng-template ngl-tab-content></ng-template>
</ngl-tab>
<ngl-tab>
<ng-template ngl-tab-label><i>Another</i> header</ng-template>
<ng-template ngl-tab-content></ng-template>
</ngl-tab>
</ngl-tabset>`);
expectHeaders(fixture.nativeElement, ['<b>My header</b>', 'Simple', '<i>Another</i> header']);
});
it('should activate tab based on id', () => {
const fixture = createTestComponent();
expect(getTabContent(fixture.nativeElement)).toBe('Tab 2');
});
it('should render the appropriate attributes based on selection', () => {
const fixture = createTestComponent();
const headers = getTabHeaders(fixture.nativeElement);
for (let i = 0; i < 4; i++) {
const isSelected = i === 1;
expect(headers[i].getAttribute('aria-selected')).toEqual(`${isSelected}`);
expect(headers[i].getAttribute('tabindex')).toEqual(isSelected ? `0` : `-1`);
}
});
it('should request tab activation on header click', () => {
const fixture = createTestComponent();
const headers = getTabHeaders(fixture.nativeElement);
headers[2].click();
fixture.detectChanges();
expect(getTabContent(fixture.nativeElement)).toBe('Tab 3');
headers[3].click();
fixture.detectChanges();
expect(getTabContent(fixture.nativeElement)).toBe('Tab 4');
});
it('should activate tab based on keyboard', () => {
const fixture = createTestComponent();
const predicate = By.css('ul[role=tablist]');
dispatchFixtureKeyEvent(fixture, predicate, `keydown.ArrowLeft`);
fixture.detectChanges();
expect(getTabContent(fixture.nativeElement)).toBe('Tab 1');
dispatchFixtureKeyEvent(fixture, predicate, `keydown.ArrowRight`);
fixture.detectChanges();
expect(getTabContent(fixture.nativeElement)).toBe('Tab 2');
dispatchFixtureKeyEvent(fixture, predicate, `keydown.ArrowRight`);
fixture.detectChanges();
expect(getTabContent(fixture.nativeElement)).toBe('Tab 3');
});
it('should call activate/deactivate methods accordingly', () => {
const fixture = createTestComponent();
const { componentInstance } = fixture;
expect(componentInstance.activate).not.toHaveBeenCalled();
componentInstance.selectedTab = 'three';
fixture.detectChanges();
expect(componentInstance.activate).toHaveBeenCalledWith(true);
componentInstance.selectedTab = 3; // index based
fixture.detectChanges();
expect(componentInstance.activate).toHaveBeenCalledWith(false);
expect(componentInstance.activate).toHaveBeenCalledWith(4, true);
componentInstance.selectedTab = 'two';
fixture.detectChanges();
expect(componentInstance.activate).toHaveBeenCalledWith(4, false);
});
it('should allow activating tab from outside', () => {
const fixture = createTestComponent(`
<ngl-tabset [selected]="selectedTab" (selectedChange)="change($event)">
<ng-template ngl-tab></ng-template>
<ng-template ngl-tab id="another" #anotherTab="nglTab">Another tab</ng-template>
</ngl-tabset>
<button (click)="selectedTab = anotherTab"></button>
`, false);
fixture.componentInstance.selectedTab = 0;
fixture.detectChanges();
const button = fixture.nativeElement.querySelector('button');
expect(getTabContent(fixture.nativeElement)).not.toBe('Another tab');
button.click();
fixture.detectChanges();
expect(getTabContent(fixture.nativeElement)).toBe('Another tab');
});
it('should render scoped tabs correctly', () => {
const fixture = createTestComponent(`
<ngl-tabset variant="scoped">
<ng-template ngl-tab></ng-template>
</ngl-tabset>
`);
const host = getTabsContainer(fixture.nativeElement);
const tabs = getTabsElement(host);
expect(host).toHaveCssClass('slds-tabs_scoped');
expect(host).not.toHaveCssClass('slds-tabs_default');
expect(tabs).toHaveCssClass('slds-tabs_scoped__nav');
});
  it('should lazily render tab content based on selection', () => {
const fixture = createTestComponent(`
<ngl-tabset selected="1" [lazy]="lazy">
<ng-template ngl-tab>Tab 0</ng-template>
<ng-template ngl-tab>Tab 1</ng-template>
<ng-template ngl-tab>Tab 2</ng-template>
</ngl-tabset>
`);
const contents = <HTMLDivElement[]>selectElements(fixture.nativeElement, '.slds-tabs_default__content');
expect(contents.length).toBe(3);
for (let i = 0; i < 3; i++) {
const isActive = i === 1;
expect(contents[i]).toHaveCssClass('slds-tabs_default__content');
expect(contents[i]).toHaveCssClass(isActive ? 'slds-show' : 'slds-hide');
expect(contents[i].textContent).toEqual(isActive ? `Tab ${i}` : '');
}
fixture.componentInstance.lazy = false;
fixture.detectChanges();
expect(contents.length).toBe(3);
for (let i = 0; i < 3; i++) {
const isActive = i === 1;
expect(contents[i]).toHaveCssClass('slds-tabs_default__content');
expect(contents[i]).toHaveCssClass(isActive ? 'slds-show' : 'slds-hide');
// Content always exists
expect(contents[i].textContent).toEqual(`Tab ${i}`);
}
});
<|fim▁hole|>});
@Component({
template: `
<ngl-tabset [selected]="selectedTab" (selectedChange)="change($event)">
<ng-template ngl-tab label="First">Tab 1</ng-template>
<ng-template ngl-tab id="two" label="Second">Tab 2</ng-template>
<ng-template ngl-tab id="three" label="Third tab" (activate)="activate(true)"
(deactivate)="activate(false)">Tab 3</ng-template>
<ngl-tab (activate)="activate(4, true)" (deactivate)="activate(4, false)">
<ng-template ngl-tab-label>Fourth tab</ng-template>
<ng-template ngl-tab-content>Tab 4</ng-template>
</ngl-tab>
</ngl-tabset>
`,
})
export class TestComponent {
selectedTab: string | number = 'two';
titleCaps: string | boolean = false;
lazy = true;
change = jasmine.createSpy('selectedChange').and.callFake(($event: any) => {
this.selectedTab = $event;
});
activate = jasmine.createSpy('activate');
}<|fim▁end|> | |
<|file_name|>QChartGallery.js<|end_file_name|><|fim▁begin|>// QChartGallery.js ---
//
// Author: Julien Wintz
// Created: Thu Feb 13 23:43:13 2014 (+0100)
// Version:
// Last-Updated:
// By:
// Update #: 13
//
// Change Log:<|fim▁hole|>
// /////////////////////////////////////////////////////////////////
// Line Chart Data Sample
// /////////////////////////////////////////////////////////////////
var ChartLineData = {
labels: [],
datasets: [{
fillColor: "rgba(151,187,205,0.5)",
strokeColor: "grey",
pointColor: "rgba(151,187,205,1)",
pointStrokeColor: "grey",
data: []
}/**, {
fillColor: "rgba(151,187,205,0.5)",
strokeColor: "rgba(151,187,205,1)",
pointColor: "rgba(151,187,205,1)",
pointStrokeColor: "#ffffff",
data: []
}**/]
}
// /////////////////////////////////////////////////////////////////
// Polar Chart Data Sample
// /////////////////////////////////////////////////////////////////
var ChartPolarData = [{
value: 30,
color: "#D97041"
}, {
value: 90,
color: "#C7604C"
}, {
value: 24,
color: "#21323D"
}, {
value: 58,
color: "#9D9B7F"
}, {
value: 82,
color: "#7D4F6D"
}, {
value: 8,
color: "#584A5E"
}]
// /////////////////////////////////////////////////////////////////
// Radar Chart Data Sample
// /////////////////////////////////////////////////////////////////
var ChartRadarData = {
labels: ["Eating","Drinking","Sleeping","Designing","Coding","Partying","Running"],
datasets: [{
fillColor: "rgba(220,220,220,0.5)",
strokeColor: "rgba(220,220,220,1)",
pointColor: "rgba(220,220,220,1)",
pointStrokeColor: "#fff",
data: [65,59,90,81,56,55,40]
}, {
fillColor: "rgba(151,187,205,0.5)",
strokeColor: "rgba(151,187,205,1)",
pointColor: "rgba(151,187,205,1)",
pointStrokeColor: "#fff",
data: [28,48,40,19,96,27,100]
}]
}
// /////////////////////////////////////////////////////////////////
// Pie Chart Data Sample
// /////////////////////////////////////////////////////////////////
var ChartPieData = [{
value: 30,
color: "#F38630"
}, {
value: 50,
color: "#E0E4CC"
}, {
value: 100,
color: "#69D2E7"
}]
// /////////////////////////////////////////////////////////////////
// Doughnut Chart Data Sample
// /////////////////////////////////////////////////////////////////
var ChartDoughnutData = [{
value: 30,
color: "#F7464A"
}, {
value: 50,
color: "#E2EAE9"
}, {
value: 100,
color: "#D4CCC5"
}, {
value: 40,
color: "#949FB1"
}, {
value: 120,
color: "#4D5360"
}]
// /////////////////////////////////////////////////////////////////
// Bar Chart Data Sample
// /////////////////////////////////////////////////////////////////
var ChartBarData = {
labels: ["January","February","March","April","May","June","July"],
datasets: [{
fillColor: "rgba(220,220,220,0.5)",
strokeColor: "rgba(220,220,220,1)",
data: [65,59,90,81,56,55,40]
}, {
fillColor: "rgba(151,187,205,0.5)",
strokeColor: "rgba(151,187,205,1)",
data: [28,48,40,19,96,27,100]
}]
}<|fim▁end|> | //
// |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import { NavBar } from './NavBar';
export {
Button,
Cards,
TextInput,
NavBar
};<|fim▁end|> | import { Button } from './Button';
import * as Cards from './Card';
import { TextInput } from './Input'; |
<|file_name|>hyperparams_builder.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|># limitations under the License.
# ==============================================================================
"""Builder function to construct tf-slim arg_scope for convolution, fc ops."""
import tensorflow as tf
from object_detection.protos import hyperparams_pb2
slim = tf.contrib.slim
def build(hyperparams_config, is_training):
"""Builds tf-slim arg_scope for convolution ops based on the config.
Returns an arg_scope to use for convolution ops containing weights
initializer, weights regularizer, activation function, batch norm function
and batch norm parameters based on the configuration.
  Note that if the batch_norm parameters are not specified in the config
(i.e. left to default) then batch norm is excluded from the arg_scope.
The batch norm parameters are set for updates based on `is_training` argument
and conv_hyperparams_config.batch_norm.train parameter. During training, they
are updated only if batch_norm.train parameter is true. However, during eval,
no updates are made to the batch norm variables. In both cases, their current
values are used during forward pass.
Args:
hyperparams_config: hyperparams.proto object containing
hyperparameters.
is_training: Whether the network is in training mode.
Returns:
arg_scope: tf-slim arg_scope containing hyperparameters for ops.
Raises:
ValueError: if hyperparams_config is not of type hyperparams.Hyperparams.
"""
if not isinstance(hyperparams_config,
hyperparams_pb2.Hyperparams):
raise ValueError('hyperparams_config not of type '
'hyperparams_pb.Hyperparams.')
batch_norm = None
batch_norm_params = None
if hyperparams_config.HasField('batch_norm'):
batch_norm = slim.batch_norm
batch_norm_params = _build_batch_norm_params(
hyperparams_config.batch_norm, is_training)
affected_ops = [slim.conv2d, slim.separable_conv2d, slim.conv2d_transpose]
if hyperparams_config.HasField('op') and (
hyperparams_config.op == hyperparams_pb2.Hyperparams.FC):
affected_ops = [slim.fully_connected]
with slim.arg_scope(
affected_ops,
weights_regularizer=_build_regularizer(
hyperparams_config.regularizer),
weights_initializer=_build_initializer(
hyperparams_config.initializer),
activation_fn=_build_activation_fn(hyperparams_config.activation),
normalizer_fn=batch_norm,
normalizer_params=batch_norm_params) as sc:
return sc
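# Illustrative text-format Hyperparams config that `build` can consume (field names
# follow the hyperparams.proto fields referenced above; the values are arbitrary):
#   activation: RELU_6
#   regularizer { l2_regularizer { weight: 0.0004 } }
#   initializer { truncated_normal_initializer { mean: 0.0 stddev: 0.03 } }
#   batch_norm { decay: 0.997 center: true scale: true epsilon: 0.001 train: true }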
def _build_activation_fn(activation_fn):
"""Builds a callable activation from config.
Args:
activation_fn: hyperparams_pb2.Hyperparams.activation
Returns:
Callable activation function.
Raises:
ValueError: On unknown activation function.
"""
if activation_fn == hyperparams_pb2.Hyperparams.NONE:
return None
if activation_fn == hyperparams_pb2.Hyperparams.RELU:
return tf.nn.relu
if activation_fn == hyperparams_pb2.Hyperparams.RELU_6:
return tf.nn.relu6
raise ValueError('Unknown activation function: {}'.format(activation_fn))
def _build_regularizer(regularizer):
"""Builds a tf-slim regularizer from config.
Args:
regularizer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf-slim regularizer.
Raises:
ValueError: On unknown regularizer.
"""
regularizer_oneof = regularizer.WhichOneof('regularizer_oneof')
if regularizer_oneof == 'l1_regularizer':
return slim.l1_regularizer(scale=float(regularizer.l1_regularizer.weight))
if regularizer_oneof == 'l2_regularizer':
return slim.l2_regularizer(scale=float(regularizer.l2_regularizer.weight))
raise ValueError('Unknown regularizer function: {}'.format(regularizer_oneof))
def _build_initializer(initializer):
"""Build a tf initializer from config.
Args:
    initializer: hyperparams_pb2.Hyperparams.initializer proto.
Returns:
tf initializer.
Raises:
ValueError: On unknown initializer.
"""
initializer_oneof = initializer.WhichOneof('initializer_oneof')
if initializer_oneof == 'truncated_normal_initializer':
return tf.truncated_normal_initializer(
mean=initializer.truncated_normal_initializer.mean,
stddev=initializer.truncated_normal_initializer.stddev)
if initializer_oneof == 'random_normal_initializer':
return tf.random_normal_initializer(
mean=initializer.random_normal_initializer.mean,
stddev=initializer.random_normal_initializer.stddev)
if initializer_oneof == 'variance_scaling_initializer':
enum_descriptor = (hyperparams_pb2.VarianceScalingInitializer.
DESCRIPTOR.enum_types_by_name['Mode'])
mode = enum_descriptor.values_by_number[initializer.
variance_scaling_initializer.
mode].name
return slim.variance_scaling_initializer(
factor=initializer.variance_scaling_initializer.factor,
mode=mode,
uniform=initializer.variance_scaling_initializer.uniform)
raise ValueError('Unknown initializer function: {}'.format(
initializer_oneof))
def _build_batch_norm_params(batch_norm, is_training):
"""Build a dictionary of batch_norm params from config.
Args:
batch_norm: hyperparams_pb2.ConvHyperparams.batch_norm proto.
is_training: Whether the models is in training mode.
Returns:
A dictionary containing batch_norm parameters.
"""
batch_norm_params = {
'decay': batch_norm.decay,
'center': batch_norm.center,
'scale': batch_norm.scale,
'epsilon': batch_norm.epsilon,
'is_training': is_training and batch_norm.train,
}
return batch_norm_params<|fim▁end|> | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export { default } from "./Bounce"; |
<|file_name|>interval.js<|end_file_name|><|fim▁begin|>//{namespace name=backend/config/view/main}
/**
* Shopware 5
* Copyright (c) shopware AG
*
* According to our dual licensing model, this program can be used either
* under the terms of the GNU Affero General Public License, version 3,
* or under a proprietary license.
*
* The texts of the GNU Affero General Public License with an additional
* permission and of our proprietary license can be found at and
* in the LICENSE file you have received along with this program.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* "Shopware" is a registered trademark of shopware AG.
* The licensing of the program under the AGPLv3 does not imply a
* trademark license. Therefore any rights, title and interest in
* our trademarks remain entirely with us.
*
* @category Shopware
* @package Base
* @subpackage Component
* @version $Id$
* @author shopware AG
*/
Ext.define('Shopware.apps.Base.view.element.Interval', {
extend:'Ext.form.field.ComboBox',
alias:[
'widget.base-element-interval'
],
queryMode: 'local',
forceSelection: false,
editable: true,
store: [
[0, '{s name=element/interval/empty_value}None (0 Sec.){/s}'],
[120, '{s name=element/interval/2_minutes}2 Minutes (120 Sec.){/s}'],
[300, '{s name=element/interval/5_minutes}5 Minutes (300 Sec.){/s}'],
[600, '{s name=element/interval/10_minutes}10 Minutes (600 Sec.){/s}'],
[900, '{s name=element/interval/15_minutes}15 Minutes (900 Sec.){/s}'],
[1800, '{s name=element/interval/30_minutes}30 Minutes (1800 Sec.){/s}'],<|fim▁hole|> [43200, '{s name=element/interval/12_hours}12 Hours (43200 Sec.){/s}'],
[86400, '{s name=element/interval/1_day}1 Day (86400 Sec.){/s}'],
[172800, '{s name=element/interval/2_days}2 Days (172800 Sec.){/s}'],
[604800, '{s name=element/interval/1_week}1 Week (604800 Sec.){/s}']
],
initComponent:function () {
var me = this;
me.callParent(arguments);
}
});<|fim▁end|> | [3600, '{s name=element/interval/1_hour}1 Hour (3600 Sec.){/s}'],
[7200, '{s name=element/interval/2_hours}2 Hours (7200 Sec.){/s}'],
[14400, '{s name=element/interval/4_hours}4 Hours (14400 Sec.){/s}'],
[28800, '{s name=element/interval/8_hours}8 Hours (28800 Sec.){/s}'], |
<|file_name|>productiontiles.component.ts<|end_file_name|><|fim▁begin|>import {Component, ElementRef} from '@angular/core';
import { ProductionTileService } from '../../shared/services/productiontile.service';
import { Router } from '@angular/router';
import {Observable} from 'rxjs/Rx';
//import 'style-loader!./tiles.scss';
@Component({
selector: 'production-tiles',
styleUrls: ['./tiles.scss'],
templateUrl: './productiontiles.html'
})
export class ProductionTiles {
productions: Observable<Array<any>>
sub_prod: Observable<Array<any>>
summary:number=0;
constructor(private router: Router, protected service: ProductionTileService) {
}
ngOnInit() {
this.productions = this.service.getProductionTiles().map(response => response.json()["tiles"]);
this.sub_prod=this.productions;
}
button_details(id): void {
this.router.navigate(['pages/productiontiles/details', id]);
}
summary_details(id): void {
this.summary = 1;
document.getElementsByClassName('widgets')['0'].style.display = 'none';<|fim▁hole|> document.getElementById("summary").style.display = 'block';
}
back_state():void{
this.summary = 0;
document.getElementsByClassName('widgets')['0'].style.display = 'block';
document.getElementById("summary").style.display = 'none';
}
}<|fim▁end|> | |
<|file_name|>test_select_device.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
from unittest.mock import patch
from sys import modules
from json import loads
devices_data = ["""
{"identifier":"192.168.56.101:5555",
"displayName":"vbox86p",
"model":"Nexus_6P_API_23",
"version":"6.0",
"vendor":"Android",
"platform":"Android",
"status":"Connected",
"errorHelp":null,
"isTablet":false,
"type":"Emulator"}
""",
"""
{"identifier":"192.168.56.102:5555",
"displayName":"vbox86p",
"model":"Nexus_5_API_22",
"version":"5.0",
"vendor":"Android",
"platform":"Android",
"status":"Connected",
"errorHelp":null,
"isTablet":false,
"type":"Emulator"}
"""]
<|fim▁hole|>def _assert_run_command_called_with_correct_parameters(command):
assert command == ["device", "--json"]
def _run_command_false(command, on_data=None, on_done=None, show_progress=True,
in_progress_message="Loading", success_message="",
failure_message=""):
_assert_run_command_called_with_correct_parameters(command)
on_done(False)
def _run_command_no_devices(command, on_data=None, on_done=None, show_progress=True,
in_progress_message="Loading", success_message="",
failure_message=""):
_assert_run_command_called_with_correct_parameters(command)
on_done(True)
def _run_command_one_device(command, on_data=None, on_done=None, show_progress=True,
in_progress_message="Loading", success_message="",
failure_message=""):
_assert_run_command_called_with_correct_parameters(command)
on_data(devices_data[0])
on_done(True)
def _run_command_multiple_devices(command, on_data=None, on_done=None, show_progress=True,
in_progress_message="Loading", success_message="",
failure_message=""):
_assert_run_command_called_with_correct_parameters(command)
for device_data in devices_data:
on_data(device_data)
on_done(True)
class TestDevices(TestCase):
@patch('nativescript-plugin.devices_space.run_command', side_effect=_run_command_false)
def test_select_device_when_unsuccessful_should_return_none(self, run_command):
callback_called = False
devices = modules["nativescript-plugin.devices_space"]
def _callback(device):
self.assertIsNone(device)
nonlocal callback_called
callback_called = True
devices.select_device(None, _callback)
self.assertTrue(callback_called)
@patch('nativescript-plugin.devices_space.run_command', side_effect=_run_command_no_devices)
def test_select_device_when_successful_when_no_devices_should_return_none(self, run_command):
callback_called = False
devices = modules["nativescript-plugin.devices_space"]
def _callback(device):
self.assertIsNone(device)
nonlocal callback_called
callback_called = True
devices.select_device(None, _callback)
self.assertTrue(callback_called)
@patch('nativescript-plugin.devices_space.run_command', side_effect=_run_command_one_device)
def test_select_device_when_successful_when_one_device_should_return_device(self, run_command):
callback_called = False
devices = modules["nativescript-plugin.devices_space"]
def _callback(actual_device):
self.assertEqual(actual_device, loads(devices_data[0]))
nonlocal callback_called
callback_called = True
devices.select_device(None, _callback)
self.assertTrue(callback_called)
@patch('nativescript-plugin.devices_space.run_command', side_effect=_run_command_multiple_devices)
def test_select_device_when_successful_when_multiple_devices_should_prompt_user(self, run_command):
class MockWindow:
def show_quick_panel(ns_command, actual_devices, callback):
expected_devices = list(map(lambda device: modules["nativescript-plugin.helpers"].get_device_info(loads(device)), devices_data))
assert expected_devices == actual_devices
class MockNSCommand:
def get_window():
return MockWindow()
devices = modules["nativescript-plugin.devices_space"]
devices.select_device(MockNSCommand, None)
@patch('nativescript-plugin.devices_space.run_command', side_effect=_run_command_multiple_devices)
def test_select_device_when_successful_when_multiple_devices_when_user_cancels_should_return_none(self, run_command):
class MockWindow:
def show_quick_panel(ns_command, actual_devices, panel_callback):
expected_devices = list(map(lambda device: modules["nativescript-plugin.helpers"].get_device_info(loads(device)), devices_data))
assert expected_devices == actual_devices
panel_callback(-1)
class MockNSCommand:
def get_window():
return MockWindow()
callback_called = False
devices = modules["nativescript-plugin.devices_space"]
def _callback(actual_device):
self.assertIsNone(actual_device)
nonlocal callback_called
callback_called = True
devices.select_device(MockNSCommand, _callback)
@patch('nativescript-plugin.devices_space.run_command', side_effect=_run_command_multiple_devices)
def test_select_device_when_successful_when_multiple_devices_when_user_selects_should_return_device(self, run_command):
index = 1
class MockWindow:
def show_quick_panel(ns_command, actual_devices, panel_callback):
expected_devices = list(map(lambda device: modules["nativescript-plugin.helpers"].get_device_info(loads(device)), devices_data))
assert expected_devices == actual_devices
panel_callback(index)
class MockNSCommand:
def get_window():
return MockWindow()
callback_called = False
devices = modules["nativescript-plugin.devices_space"]
def _callback(actual_device):
self.assertEqual(actual_device, loads(devices_data[index]))
nonlocal callback_called
callback_called = True
devices.select_device(MockNSCommand, _callback)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>ISBNValidatorTest.java<|end_file_name|><|fim▁begin|>package de.gwdg.metadataqa.marc.definition.general.validator;
import de.gwdg.metadataqa.marc.dao.DataField;
import de.gwdg.metadataqa.marc.dao.MarcRecord;
import de.gwdg.metadataqa.marc.MarcSubfield;
import de.gwdg.metadataqa.marc.definition.ValidatorResponse;
import de.gwdg.metadataqa.marc.definition.tags.tags01x.Tag020;
import de.gwdg.metadataqa.marc.model.validation.ValidationError;
import de.gwdg.metadataqa.marc.model.validation.ValidationErrorType;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
import static org.junit.Assert.assertNotNull;
public class ISBNValidatorTest {
@Test
public void testInvalid() {
MarcSubfield subfield = createMarcSubfield("3p");
assertNotNull(subfield);
assertTrue(subfield.getDefinition().hasValidator());
SubfieldValidator validator = subfield.getDefinition().getValidator();
assertNotNull(validator);
ValidatorResponse response = validator.isValid(subfield);
assertFalse(response.isValid());
ValidationError validationError = response.getValidationErrors().get(0);
assertNotNull(validationError);
assertEquals("test", validationError.getRecordId());
assertEquals("020$a", validationError.getMarcPath());
assertEquals(ValidationErrorType.SUBFIELD_ISBN, validationError.getType());
assertEquals("ISBN does not fit the pattern \\d[\\d-]+[\\dxX].",
validationError.getMessage());
/*
assertEquals("'3p' does not a have an ISBN value, it does not fit the pattern \\d[\\d-]+[\\dxX].",
validationError.getMessage());
*/
assertEquals("https://en.wikipedia.org/wiki/International_Standard_Book_Number", validationError.getUrl());
}
@Test
public void test9992158107() {
MarcSubfield subfield = createMarcSubfield("99921-58-10-7");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void test9971502100() {
MarcSubfield subfield = createMarcSubfield("9971-5-0210-0");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void test9604250590() {
MarcSubfield subfield = createMarcSubfield("960-425-059-0");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void test8090273416() {
MarcSubfield subfield = createMarcSubfield("80-902734-1-6");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void test8535902775() {
MarcSubfield subfield = createMarcSubfield("85-359-0277-5");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void test1843560283() {
MarcSubfield subfield = createMarcSubfield("1-84356-028-3");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void test0684843285() {
MarcSubfield subfield = createMarcSubfield("0684843285");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void test080442957X() {
MarcSubfield subfield = createMarcSubfield("0-8044-2957-X");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void test0851310419() {
MarcSubfield subfield = createMarcSubfield("0851310419");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void test0943396042() {
MarcSubfield subfield = createMarcSubfield("0943396042");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());<|fim▁hole|> @Test
public void testWithSpaces() {
MarcSubfield subfield = createMarcSubfield("0 405 05352 5");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
@Test
public void testMultiple() {
List<String> isbns = Arrays.asList("0-9752298-0-X", "0-9752298-0-X (fűzött)");
for (String ISBN : isbns) {
MarcSubfield subfield = createMarcSubfield(ISBN);
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(ISBN, response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
}
@Test
public void testSuffixes() {
MarcSubfield subfield = createMarcSubfield("9782070769148 (broché) :");
ValidatorResponse response = subfield.getDefinition().getValidator().isValid(subfield);
assertTrue(response.isValid());
assertEquals(0, response.getValidationErrors().size());
}
private MarcSubfield createMarcSubfield(String ISBN) {
MarcRecord marcRecord = new MarcRecord("test");
DataField field = new DataField(Tag020.getInstance(), " ", " ", "a", ISBN);
field.setMarcRecord(marcRecord);
return field.getSubfield("a").get(0);
}
}<|fim▁end|> | }
|
<|file_name|>TimeSignaturePopover.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react';
import { connect } from 'react-redux';
import { StyleSheet, css } from 'aphrodite';
import { changeTimeSignature } from '../../actions/track';
import HoverableText from './HoverableText';
const styles = StyleSheet.create({
text: {
fontFamily: 'Optima, Segoe, Segoe UI, Candara, Calibri, Arial, sans-serif'
},
popoverContainer: {
background: '#FEFBF7',
height: 200,
display: 'flex',
flexDirection: 'column',
justifyContent: 'space-between'
},
templateRow: {
display: 'flex',
justifyContent: 'space-around',
paddingTop: 10
},
timeSigRow: { display: 'flex', justifyContent: 'center', flexShrink: 10 },
checkboxRow: {
display: 'flex',
justifyContent: 'space-around',
paddingBottom: 10,
paddingLeft: 5,
paddingRight: 5
},
beats: {
display: 'flex',
flexDirection: 'column',
justifyContent: 'center',
paddingTop: 15,
alignItems: 'flex-end',
flexBasis: '55%'
},
beatType: {
display: 'flex',
flexDirection: 'column',
justifyContent: 'center',
paddingBottom: 15,
alignItems: 'flex-end',
flexBasis: '55%'
},
numberText: { fontSize: 40, paddingRight: 10 },
topArrows: {
display: 'flex',
flexDirection: 'column',
justifyContent: 'center',
paddingTop: 15,
flexBasis: '45%'
},
bottomArrows: {
display: 'flex',
flexDirection: 'column',
justifyContent: 'center',
paddingBottom: 15,
flexBasis: '45%'
},
checkboxText: { fontWeight: 300, fontSize: 12, paddingTop: 3 }
});
class TimeSignaturePopover extends Component {
constructor(props) {
super(props);
this.state = {
timeSignature: Object.assign({}, props.timeSignature),
toEndChecked: false,
allChecked: false
};
}
componentWillUnmount() {
const { timeSignature, toEndChecked, allChecked } = this.state;
this.props.changeTimeSignature(
{ measureIndex: this.props.measureIndex },
timeSignature,
toEndChecked,
allChecked
);
}
onTwoFourClick = () => {
// TODO extract these things into a component
this.setState({ timeSignature: { beats: 2, beatType: 4 } });
};
onFourFourClick = () => {
this.setState({ timeSignature: { beats: 4, beatType: 4 } });
};
onSixEightClick = () => {
this.setState({ timeSignature: { beats: 6, beatType: 8 } });
};
onIncrementBeats = () => {
if (this.state.timeSignature.beats < 32) {
this.setState({
timeSignature: {
beats: this.state.timeSignature.beats + 1,
beatType: this.state.timeSignature.beatType
}
});
}
};
onIncrementBeatType = () => {
if (this.state.timeSignature.beatType < 32) {
this.setState({
timeSignature: {
beats: this.state.timeSignature.beats,
beatType: this.state.timeSignature.beatType * 2
}
});
}
};
onDecrementBeats = () => {
if (this.state.timeSignature.beats > 1) {
this.setState({
timeSignature: {
beats: this.state.timeSignature.beats - 1,
beatType: this.state.timeSignature.beatType
}
});
}
};
onDecrementBeatType = () => {
if (this.state.timeSignature.beatType > 1) {
this.setState({
timeSignature: {
beats: this.state.timeSignature.beats,
beatType: this.state.timeSignature.beatType / 2
}
});
}
};
toEndChanged = () => {
this.setState({ toEndChecked: !this.state.toEndChecked });
};
allChanged = () => {
this.setState({ allChecked: !this.state.allChecked });
};
render() {
return (
<div className={css(styles.popoverContainer)}>
<span className={css(styles.templateRow)}>
<HoverableText onClick={this.onTwoFourClick} text="2/4" />
<HoverableText onClick={this.onFourFourClick} text="4/4" />
<HoverableText onClick={this.onSixEightClick} text="6/8" />
</span>
<div className={css(styles.timeSigRow)}>
<span className={css(styles.beats)}>
<h3 className={css(styles.text, styles.numberText)}>
{this.state.timeSignature.beats}
</h3>
</span>
<span className={css(styles.topArrows)}>
<HoverableText onClick={this.onIncrementBeats} text="▲" />
<HoverableText onClick={this.onDecrementBeats} text="▼" />
</span>
</div>
<div className={css(styles.timeSigRow)}>
<span className={css(styles.beatType)}>
<h3 className={css(styles.text, styles.numberText)}>
{this.state.timeSignature.beatType}
</h3>
</span>
<span className={css(styles.bottomArrows)}>
<HoverableText onClick={this.onIncrementBeatType} text="▲" />
<HoverableText onClick={this.onDecrementBeatType} text="▼" />
</span>
</div>
<span className={css(styles.checkboxRow)}>
<small className={css(styles.text, styles.checkboxText)}>
To End
</small>
<input
type="checkbox"
value={this.state.toEndChecked}
onChange={this.toEndChanged}
/>
<small className={css(styles.text, styles.checkboxText)}>
All Measures
</small>
<input
type="checkbox"
value={this.state.allChecked}
onChange={this.allChanged}
/><|fim▁hole|> }
}
export default connect(null, { changeTimeSignature })(TimeSignaturePopover);<|fim▁end|> | </span>
</div>
); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.