prompt: large_string (lengths 70 – 991k)
completion: large_string (lengths 0 – 1.02k)
<|file_name|>statix.js<|end_file_name|><|fim▁begin|>/* jshint node: true */ (function () { "use strict"; var APP; var utils = require("./utils"); var fs = require("fs"); var path = require("path"); var wrench = require("wrench"); var colors = require("colors"); var supervisor = require("supervisor"); function server(settingsFile, dir, port, interval) { utils.loadSettings(settingsFile, function (settings) { var i; dir = dir || path.join(path.dirname(settings.file), settings.source_dir); dir = dir.split(","); port = port || 8000; var reload = function (e, f) { restartServer(settings, port); }; for (i = 0; i < dir.length; i++) { fs.watch(dir[i], reload); } restartServer(settings, port); }); } function newProject(folder) { utils.fsCopy(path.join(path.dirname(__dirname), "example"), folder || "example"); } function restartServer(settings, port) { var rootDir = path.join(settings.source_dir), indexFound = false, express = require("express"), consolidate = require("consolidate"), app = express.createServer(); if (APP) { APP.close(); APP = null; } app.configure(function () { app.disable("view cache"); app.use(express.bodyParser()); app.use(express.methodOverride()); app.use(app.router); app.use(express.static(rootDir)); }); if (settings.expressConfig) { app.configure(settings.expressConfig(express, app)); } app.use(express.errorHandler({dumpExceptions: true, showStack: true})); function renderTemplate(page) { return function (req, res, next) { var prop, data = {layout : false}; for (prop in settings.global_data) { data[prop] = settings.global_data[prop]; } for (prop in page.data) { data[prop] = page.data[prop]; } consolidate[settings.template_engine](path.join(rootDir, page.source), data, function (err, html) { if (err) { throw err; } res.send(html); }); }; } var page, i, subdir; for (i = 0; i < settings.pages.length; i++) { page = settings.pages[i]; if (!indexFound && page.source.indexOf("index") > -1 || page.output.indexOf("index") > -1) { subdir = path.join(path.sep, page.output.substr(0, page.output.indexOf("index") - 1)); app.get(subdir, renderTemplate(page)); indexFound = true; } app.get(path.join(path.sep, page.output), renderTemplate(page)); } if (!indexFound) { app.get(path.sep, renderTemplate(settings.pages[0])); } var running = true; app.listen(port); console.log(""); app.on("error", function (e) { if (running) { running = false; if (e.code === "EADDRINUSE") { console.error(("Port " + port + " is already in use. 
Please try with a different port, or exit the process which is tying up the port.").yellow); } else { console.error(e); } } }); setTimeout(function () { if (running) { console.log("STATIX server is now running on port ".green + port.toString().yellow); } }, 500); APP = app; } function build(settingsFile) { utils.loadSettings(settingsFile, function (settings) { var consolidate = require("consolidate"), sourceDir = path.join(settings.source_dir), outputDir = path.join(settings.output_dir); console.log(""); settings.preBuild = settings.preBuild || function (cb) { return cb(); }; settings.preBuild(function () { if (fs.existsSync(outputDir)) { wrench.rmdirSyncRecursive(outputDir); } utils.copyMatchingFiles(sourceDir, outputDir, settings.include_patterns, settings.exclude_patterns); function renderTemplate(page, cb) { consolidate[settings.template_engine](path.join(sourceDir, page.source), data, function (err, html) { var outputFile = path.join(outputDir, page.output); if (fs.existsSync(outputFile)) { fs.unlinkSync(outputFile); }<|fim▁hole|> fs.writeFileSync(path.join(outputDir, page.output), html); if (cb) { cb(); } }); } function callback() { settings.postBuild = settings.postBuild || function (cb) { return cb(); }; settings.postBuild(function () { console.log("Statix build complete!".green); console.log(""); process.exit(); }); } for (var i = 0; i < settings.pages.length; i ++) { var page = settings.pages[i], data = {}, cb = null, prop; for (prop in settings.global_data) { data[prop] = settings.global_data[prop]; } for (prop in settings.build_data) { data[prop] = settings.build_data[prop]; } for (prop in page.data) { data[prop] = page.data[prop]; } if (i + 1 === settings.pages.length) { cb = callback; } renderTemplate(page, cb); } }); }); } module.exports = { utils : utils, server : server, newProject : newProject, restartServer : restartServer, build : build }; }());<|fim▁end|>
wrench.mkdirSyncRecursive(path.dirname(path.join(outputDir, page.output)));
<|file_name|>angular-translate-loader-partial.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1<|fim▁hole|><|fim▁end|>
oid sha256:d1b4f8dc9d9f4ae479ea4896a2bc244b2d51382eb978009a23c65d2d9ff28f9f size 5749
<|file_name|>time.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use ipc_channel::ipc; use profile::time; use profile_traits::ipc as ProfiledIpc; use profile_traits::time::{ProfilerCategory, ProfilerData, ProfilerMsg}; use servo_config::opts::OutputOptions; use std::thread; use std::time::Duration; #[test] fn time_profiler_smoke_test() { let chan = time::Profiler::create(&None, None); assert!(true, "Can create the profiler thread"); let (ipcchan, _ipcport) = ipc::channel().unwrap(); chan.send(ProfilerMsg::Exit(ipcchan)); assert!(true, "Can tell the profiler thread to exit"); } #[test] fn time_profiler_stats_test() { let even_data = vec![1.234, 3.24567, 3.54578, 5.0, 5.324, 7.345, 9.2345, 10.2342345, 13.2599, 15.0]; let (even_mean, even_median, even_min, even_max) = time::Profiler::get_statistics(&even_data); assert_eq!(7.34230845, even_mean); assert_eq!(7.345, even_median); assert_eq!(1.234, even_min); assert_eq!(15.0, even_max); let odd_data = vec![1.234, 3.24567, 3.54578, 5.0, 5.324, 7.345, 9.2345, 10.2342345, 13.2599]; let (odd_mean, odd_median, odd_min, odd_max) = time::Profiler::get_statistics(&odd_data); assert_eq!(6.491453833333334, odd_mean); assert_eq!(5.324, odd_median); assert_eq!(1.234, odd_min); assert_eq!(13.2599, odd_max); } #[test] fn channel_profiler_test() {<|fim▁hole|> thread::sleep(Duration::from_secs(2)); profiled_sender.send(43).unwrap(); }); let val_profile_receiver = profiled_receiver.recv().unwrap(); assert_eq!(val_profile_receiver, 43); let (sender, receiver) = ipc::channel().unwrap(); chan.send(ProfilerMsg::Get((ProfilerCategory::IpcReceiver, None), sender.clone())); match receiver.recv().unwrap() { // asserts that the time spent in the sleeping thread is more than 1500 milliseconds ProfilerData::Record(time_data) => assert!(time_data[0] > 1.5e3), ProfilerData::NoRecords => assert!(false), }; } #[test] fn bytes_channel_profiler_test() { let chan = time::Profiler::create(&Some(OutputOptions::Stdout(5.0)), None); let (profiled_sender, profiled_receiver) = ProfiledIpc::bytes_channel(chan.clone()).unwrap(); thread::spawn(move || { thread::sleep(Duration::from_secs(2)); profiled_sender.send(&[1, 2, 3]).unwrap(); }); let val_profile_receiver = profiled_receiver.recv().unwrap(); assert_eq!(val_profile_receiver, [1, 2, 3]); let (sender, receiver) = ipc::channel().unwrap(); chan.send(ProfilerMsg::Get((ProfilerCategory::IpcBytesReceiver, None), sender.clone())); match receiver.recv().unwrap() { // asserts that the time spent in the sleeping thread is more than 1500 milliseconds ProfilerData::Record(time_data) => assert!(time_data[0] > 1.5e3), ProfilerData::NoRecords => assert!(false), }; } #[cfg(debug_assertions)] #[test] #[should_panic] fn time_profiler_unsorted_stats_test() { let unsorted_data = vec![5.0, 7.5, 1.0, 8.9]; time::Profiler::get_statistics(&unsorted_data); } #[cfg(debug_assertions)] #[test] #[should_panic] fn time_profiler_data_len_zero() { let zero_data = vec![]; time::Profiler::get_statistics(&zero_data); }<|fim▁end|>
let chan = time::Profiler::create(&Some(OutputOptions::Stdout(5.0)), None); let (profiled_sender, profiled_receiver) = ProfiledIpc::channel(chan.clone()).unwrap(); thread::spawn(move || {
<|file_name|>index.js<|end_file_name|><|fim▁begin|>export { default as Home} from './home'; export { default as Post} from './post'; export { default as Search} from './search';<|fim▁hole|>export { default as Author} from './author'; export { default as Comment} from './comment'; export { default as Setting} from './setting'; export { default as Offline} from './offline'; export { default as OfflinePost} from './offlinePost';<|fim▁end|>
export { default as About} from './about';
<|file_name|>colorbars.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # desc = 'Color bars' phash = '' def plot(): import matplotlib as mpl from matplotlib import pyplot as pp from matplotlib import style import numpy as np # Make a figure and axes with dimensions as desired.<|fim▁hole|> # the colorbar will be used. cmap = mpl.cm.cool norm = mpl.colors.Normalize(vmin=-5, vmax=10) # ColorbarBase derives from ScalarMappable and puts a colorbar in a # specified axes, so it has everything needed for a standalone colorbar. # There are many more kwargs, but the following gives a basic continuous # colorbar with ticks and labels. cb1 = mpl.colorbar.ColorbarBase( ax[0], cmap=cmap, norm=norm, orientation='horizontal' ) cb1.set_label('Some Units') # The second example illustrates the use of a ListedColormap, a # BoundaryNorm, and extended ends to show the "over" and "under" value # colors. cmap = mpl.colors.ListedColormap(['r', 'g', 'b', 'c']) cmap.set_over('0.25') cmap.set_under('0.75') # If a ListedColormap is used, the length of the bounds array must be one # greater than the length of the color list. The bounds must be # monotonically increasing. bounds = [1, 2, 4, 7, 8] norm = mpl.colors.BoundaryNorm(bounds, cmap.N) cb2 = mpl.colorbar.ColorbarBase( ax[1], cmap=cmap, norm=norm, # to use 'extend', you must # specify two extra boundaries: boundaries=[0] + bounds + [13], extend='both', ticks=bounds, # optional spacing='proportional', orientation='horizontal' ) cb2.set_label('Discrete intervals, some other units') # The third example illustrates the use of custom length colorbar # extensions, used on a colorbar with discrete intervals. cmap = mpl.colors.ListedColormap( [[0., .4, 1.], [0., .8, 1.], [1., .8, 0.], [1., .4, 0.] ]) cmap.set_over((1., 0., 0.)) cmap.set_under((0., 0., 1.)) bounds = [-1., -.5, 0., .5, 1.] norm = mpl.colors.BoundaryNorm(bounds, cmap.N) cb3 = mpl.colorbar.ColorbarBase( ax[2], cmap=cmap, norm=norm, boundaries=[-10]+bounds+[10], extend='both', # Make the length of each extension # the same as the length of the # interior colors: extendfrac='auto', ticks=bounds, spacing='uniform', orientation='horizontal' ) cb3.set_label('Custom extension lengths, some other units') return fig<|fim▁end|>
fig, ax = pp.subplots(3) # Set the colormap and norm to correspond to the data for which
<|file_name|>struct_def.rs<|end_file_name|><|fim▁begin|>struct S1 { f1: i32, f2 } struct S2 { f1: i32<|fim▁hole|><|fim▁end|>
f2: i32 }
<|file_name|>dtmdata.py<|end_file_name|><|fim▁begin|>''' Created on 14 Jun 2016 @author: gjermund.vingerhagen ''' import numpy as np import scipy.interpolate as intp import linecache import utmconverter as utm def splitHead(inp): return inp def lineToArr(l1): arra = np.array(np.fromstring(l1[144:1024],dtype=int,sep=' ')) for i in range(1,30): arra = np.append(arra,np.fromstring(l1[1024*i:1024*(i+1)],dtype=int,sep=' ')) return arra def findClosestPoint(east,north): try: dtminfo = getDTMFile(east,north) eastLine = round((east-dtminfo[1])//10) northLine = round((north-dtminfo[2])//10) east_delta = (east-dtminfo[1])%10 north_delta = (north-dtminfo[1])%10 return [eastLine,northLine,dtminfo[0],east_delta,north_delta,dtminfo[1],dtminfo[2]] except: raise Exception("Closest point has no DTM file ") def readFile(filename): line1 = open("C:\\python\\dtms\\{}".format(filename), 'r').read(500000) print(line1[0:134]) print(line1[150:156]) print(line1[156:162]) print(line1[162:168]) print(line1[529:535]) print(line1[535:541]) print('{:9}{}'.format('MinEast:',line1[546:570])) print('{:9}{}'.format('MinNorth:',line1[570:594])) print(line1[594:618]) print(line1[618:642]) print(line1[642:666]) print(line1[666:690]) print(line1[690:714]) print(line1[714:738]) print(line1[738:762]) print(line1[762:786]) print('{:9}{}'.format('dy:',line1[816:828])) print('{:9}{}'.format('dx:',line1[828:840])) print('{:10}{}'.format('Rows:',line1[858:864])) print('-----') print() minEast = float(line1[546:570]) minNorth = float(line1[570:594]) print(line1[1024+30720*0:1024+144+30720*0]) #=============================================================================== # print(line1[1168:2048]) # print(line1[1024*2:1024*3]) # print(line1[1024*4:1024*5]) #=============================================================================== def getElevation(eastL,northL,dtmfile): rows = 5041 head = 1024 lhead = 144 blockSize = 30720 eastLine = eastL northLine = northL with open("C:\\python\\dtms\\{}".format(dtmfile), 'r') as fin: fin.seek(head+blockSize*eastLine) data = fin.read(blockSize) if northLine < 146: s = 144+northLine*6 else: c = (northLine-146) // 170 +1 d = (northLine-146) % 170 s = 1024*(c)+d*6 return float(data[s:s+6])/10 def getElevationArea(eastLmin,northLmin,eastLmax,northLmax,dtmfile): rows = 5041 head = 1024 lhead = 144 blockSize = 30720 rect = [] with open("C:\\python\\dtms\\{}".format(dtmfile), 'r') as fin: for eastLine in range(eastLmin,eastLmax+1): line = [] fin.seek(head+blockSize*eastLine) data = fin.read(blockSize) for northLine in range(northLmin,northLmax): if northLine < 146: s = 144+northLine*6 else: c = (northLine-146) // 170 +1 d = (northLine-146) % 170 s = 1024*(c)+d*6 line.append(int(data[s:s+6])) rect.append(line) return rect def calculateEle(x,y,coordsys='utm'): if coordsys == 'latlon': east, north, zone_number, zone_letter = utm.from_latlon(x, y) else: east,north = x,y try: p = findClosestPoint(east, north) dpx = p[3] dpy = p[4] ele1 = getElevation(p[0], p[1],p[2]) ele2 = getElevation(p[0]+1, p[1],p[2]) ele3 = getElevation(p[0], p[1]+1,p[2]) ele4 = getElevation(p[0]+1, p[1]+1,p[2]) #c_ele = getInterpolatedEle(ele1,ele2,ele3,ele4,[dpx,dpy])[2] <|fim▁hole|> except Exception: raise Exception("Something went wrong") def getInterpolatedEle(p1e=10,p2e=5,p3e=5,p4e=0,pxc=[5,5]): if sum(pxc)>10: p1 = np.array([10,10,p4e]) else: p1 = np.array([0,0,p1e]) p2 = np.array([10,0,p2e]) p3 = np.array([0,10,p3e]) px = np.array([pxc[0],pxc[1]]) a = p2-p1 b = p3-p1 N = np.cross(a,b) c = px-p1[:2] x = -(N[0]*c[0]+N[1]*c[1]) / 
N[2] C = np.array([c[0],c[1],x]) p4 = p1 + C return p4 def interpolateEle2(p1e=10,p2e=5,p3e=5,p4e=0,pxc=[5,5]): x = np.array([0,10]) y = np.array( [0,10]) z = np.array([[p1e,p3e],[p2e,p4e]]) p1=pxc[0] p2=pxc[1] f = intp.RectBivariateSpline(x,y,z,kx=1, ky=1, s=0) return f(p1,p2)[0][0] def getDTMFile(east,north): try: dtmfile = getDTMdict() for key in dtmfile: if north>=dtmfile[key][1] and north<=dtmfile[key][1]+50000: if east>=dtmfile[key][0] and east<=dtmfile[key][0]+50000: return [key,int(dtmfile[key][0]),int(dtmfile[key][1])] except: raise Exception('DTM file not available') def getDTMdict(): dtmfile = dict() dtmfile['6404_3_10m_z32.dem'] = [399800,6399900] dtmfile['6404_4_10m_z32.dem'] = [399800,6449800] dtmfile['7005_2_10m_z32.dem'] = [549800,6999800] dtmfile['6503_3_10m_z32.dem'] = [299800,6499800] dtmfile['6903_1_10m_z32.dem'] = [349800,6949800] dtmfile['6904_4_10m_z32.dem'] = [399795,6949795] dtmfile['6505_4_10m_z32.dem'] = [499800,6549800] dtmfile['6504_1_10m_z32.dem'] = [449800,6549800] dtmfile['6604_2_10m_z32.dem'] = [449800,6599800] dtmfile['6605_3_10m_z32.dem'] = [499800,6599800] dtmfile['6603_2_10m_z32.dem'] = [349800,6599800] dtmfile['6506_1_10m_z32.dem'] = [649800,6549800] dtmfile['6506_2_10m_z32.dem'] = [649800,6503000] dtmfile['6506_3_10m_z32.dem'] = [599800,6503000] dtmfile['6506_4_10m_z32.dem'] = [599800,6549800] return dtmfile def hasDTMFile(minEast, minNorth,maxEast,maxNorth): dtmfile = getDTMdict() dtm = getDTMFile(minEast, minNorth) if dtm != -1: if (maxEast-50000)< dtm[1] and (maxNorth-50000)<dtm[2]: return True return False if __name__ == "__main__": readFile('6506_3_10m_z32.dem')<|fim▁end|>
d_ele = interpolateEle2(ele1,ele2,ele3,ele4,[dpx,dpy]) return d_ele
<|file_name|>elasticsearch_logging_discovery.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package main import ( "flag" "fmt" "os" "strings" "time" "github.com/golang/glog" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" restclient "k8s.io/client-go/rest" "k8s.io/client-go/tools/clientcmd" clientapi "k8s.io/client-go/tools/clientcmd/api" api "k8s.io/kubernetes/pkg/apis/core" clientset "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset" ) func buildConfigFromEnvs(masterURL, kubeconfigPath string) (*restclient.Config, error) { if kubeconfigPath == "" && masterURL == "" { kubeconfig, err := restclient.InClusterConfig() if err != nil { return nil, err } return kubeconfig, nil } return clientcmd.NewNonInteractiveDeferredLoadingClientConfig( &clientcmd.ClientConfigLoadingRules{ExplicitPath: kubeconfigPath}, &clientcmd.ConfigOverrides{ClusterInfo: clientapi.Cluster{Server: masterURL}}).ClientConfig() } func flattenSubsets(subsets []api.EndpointSubset) []string { ips := []string{} for _, ss := range subsets { for _, addr := range ss.Addresses { ips = append(ips, fmt.Sprintf(`"%s"`, addr.IP)) } } return ips } func main() { flag.Parse() glog.Info("Kubernetes Elasticsearch logging discovery") cc, err := buildConfigFromEnvs(os.Getenv("APISERVER_HOST"), os.Getenv("KUBE_CONFIG_FILE")) if err != nil { glog.Fatalf("Failed to make client: %v", err) } client, err := clientset.NewForConfig(cc) if err != nil { glog.Fatalf("Failed to make client: %v", err) } namespace := metav1.NamespaceSystem envNamespace := os.Getenv("NAMESPACE") if envNamespace != "" { if _, err := client.Core().Namespaces().Get(envNamespace, metav1.GetOptions{}); err != nil { glog.Fatalf("%s namespace doesn't exist: %v", envNamespace, err) } namespace = envNamespace<|fim▁hole|> } var elasticsearch *api.Service serviceName := os.Getenv("ELASTICSEARCH_SERVICE_NAME") if serviceName == "" { serviceName = "elasticsearch-logging" } // Look for endpoints associated with the Elasticsearch loggging service. // First wait for the service to become available. for t := time.Now(); time.Since(t) < 5*time.Minute; time.Sleep(10 * time.Second) { elasticsearch, err = client.Core().Services(namespace).Get(serviceName, metav1.GetOptions{}) if err == nil { break } } // If we did not find an elasticsearch logging service then log a warning // and return without adding any unicast hosts. if elasticsearch == nil { glog.Warningf("Failed to find the elasticsearch-logging service: %v", err) return } var endpoints *api.Endpoints addrs := []string{} // Wait for some endpoints. count := 0 for t := time.Now(); time.Since(t) < 5*time.Minute; time.Sleep(10 * time.Second) { endpoints, err = client.Core().Endpoints(namespace).Get(serviceName, metav1.GetOptions{}) if err != nil { continue } addrs = flattenSubsets(endpoints.Subsets) glog.Infof("Found %s", addrs) if len(addrs) > 0 && len(addrs) == count { break } count = len(addrs) } // If there was an error finding endpoints then log a warning and quit. 
if err != nil { glog.Warningf("Error finding endpoints: %v", err) return } glog.Infof("Endpoints = %s", addrs) fmt.Printf("discovery.zen.ping.unicast.hosts: [%s]\n", strings.Join(addrs, ", ")) }<|fim▁end|>
<|file_name|>executor.py<|end_file_name|><|fim▁begin|>"""Executor util helpers.""" from __future__ import annotations from concurrent.futures import ThreadPoolExecutor import contextlib import logging import queue import sys from threading import Thread import time import traceback from .thread import async_raise _LOGGER = logging.getLogger(__name__) MAX_LOG_ATTEMPTS = 2 _JOIN_ATTEMPTS = 10 EXECUTOR_SHUTDOWN_TIMEOUT = 10 def _log_thread_running_at_shutdown(name: str, ident: int) -> None: """Log the stack of a thread that was still running at shutdown.""" frames = sys._current_frames() # pylint: disable=protected-access stack = frames.get(ident) formatted_stack = traceback.format_stack(stack) _LOGGER.warning( "Thread[%s] is still running at shutdown: %s", name, "".join(formatted_stack).strip(), ) def join_or_interrupt_threads( threads: set[Thread], timeout: float, log: bool ) -> set[Thread]: """Attempt to join or interrupt a set of threads.""" joined = set() timeout_per_thread = timeout / len(threads) for thread in threads: thread.join(timeout=timeout_per_thread) if not thread.is_alive() or thread.ident is None: joined.add(thread) continue if log: _log_thread_running_at_shutdown(thread.name, thread.ident) with contextlib.suppress(SystemError): # SystemError at this stage is usually a race condition # where the thread happens to die right before we force # it to raise the exception async_raise(thread.ident, SystemExit) return joined class InterruptibleThreadPoolExecutor(ThreadPoolExecutor): """A ThreadPoolExecutor instance that will not deadlock on shutdown.""" def shutdown(self, *args, **kwargs) -> None: # type: ignore """Shutdown backport from cpython 3.9 with interrupt support added.""" with self._shutdown_lock: # type: ignore[attr-defined] self._shutdown = True # Drain all work items from the queue, and then cancel their # associated futures. while True: try: work_item = self._work_queue.get_nowait() except queue.Empty: break if work_item is not None: work_item.future.cancel() # Send a wake-up to prevent threads calling # _work_queue.get(block=True) from permanently blocking. self._work_queue.put(None) # The above code is backported from python 3.9 # # For maintainability join_threads_or_timeout is # a separate function since it is not a backport from # cpython itself #<|fim▁hole|> self.join_threads_or_timeout() def join_threads_or_timeout(self) -> None: """Join threads or timeout.""" remaining_threads = set(self._threads) # type: ignore[attr-defined] start_time = time.monotonic() timeout_remaining: float = EXECUTOR_SHUTDOWN_TIMEOUT attempt = 0 while True: if not remaining_threads: return attempt += 1 remaining_threads -= join_or_interrupt_threads( remaining_threads, timeout_remaining / _JOIN_ATTEMPTS, attempt <= MAX_LOG_ATTEMPTS, ) timeout_remaining = EXECUTOR_SHUTDOWN_TIMEOUT - ( time.monotonic() - start_time ) if timeout_remaining <= 0: return<|fim▁end|>
<|file_name|>quickInfoGenerics.ts<|end_file_name|><|fim▁begin|>/// <reference path='fourslash.ts' /> ////class Con/*1*/tainer<T> { //// x: T; ////} ////interface IList</*2*/T> { //// getItem(i: number): /*3*/T; ////} ////class List2</*4*/T extends IList<number>> implements IList<T> { //// private __it/*6*/em: /*5*/T[]; //// public get/*7*/Item(i: number) { //// return this.__item[i]; //// } //// public /*8*/method</*9*/S extends IList<T>>(s: S, p: /*10*/T[]) { //// return s; //// } ////} ////function foo4</*11*/T extends Date>(test: T): T; ////function foo4</*12*/S extends string>(test: S): S; ////function foo4(test: any): any; ////function foo4</*13*/T extends Date>(test: any): any { return null; } ////var x: List2<IList<number>>; ////var y = x./*14*/getItem(10); ////var x2: IList<IList<number>>; ////var x3: IList<number>; <|fim▁hole|>verify.quickInfoIs("class Container<T>", undefined); goTo.marker("2"); verify.quickInfoIs("(type parameter) T in IList<T>", undefined); goTo.marker("3"); verify.quickInfoIs("(type parameter) T in IList<T>", undefined); goTo.marker("4"); verify.quickInfoIs("(type parameter) T in List2<T extends IList<number>>", undefined); goTo.marker("5"); verify.quickInfoIs("(type parameter) T in List2<T extends IList<number>>", undefined); goTo.marker("6"); verify.quickInfoIs("(property) List2<T extends IList<number>>.__item: T[]", undefined); goTo.marker("7"); verify.quickInfoIs("(method) List2<T extends IList<number>>.getItem(i: number): T", undefined); goTo.marker("8"); verify.quickInfoIs("(method) List2<T extends IList<number>>.method<S extends IList<T>>(s: S, p: T[]): S", undefined); goTo.marker("9"); verify.quickInfoIs("(type parameter) S in List2<T extends IList<number>>.method<S extends IList<T>>(s: S, p: T[]): S", undefined); goTo.marker("10"); verify.quickInfoIs("(type parameter) T in List2<T extends IList<number>>", undefined); goTo.marker("11"); verify.quickInfoIs("(type parameter) T in foo4<T extends Date>(test: T): T", undefined); goTo.marker("12"); verify.quickInfoIs("(type parameter) S in foo4<S extends string>(test: S): S", undefined); goTo.marker("13"); verify.quickInfoIs("(type parameter) T in foo4<T extends Date>(test: any): any", undefined); goTo.marker("14"); verify.quickInfoIs("(method) List2<IList<number>>.getItem(i: number): IList<number>", undefined); goTo.marker("15"); verify.quickInfoIs("(method) List2<IList<number>>.method<IList<IList<number>>>(s: IList<IList<number>>, p: IList<number>[]): IList<IList<number>>", undefined);<|fim▁end|>
////var y2 = x./*15*/method(x2, [x3, x3]); goTo.marker("1");
<|file_name|>vrdisplay.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::compartments::InCompartment; use crate::dom::bindings::callback::ExceptionHandling; use crate::dom::bindings::cell::DomRefCell; use crate::dom::bindings::codegen::Bindings::NavigatorBinding::NavigatorMethods; use crate::dom::bindings::codegen::Bindings::PerformanceBinding::PerformanceMethods; use crate::dom::bindings::codegen::Bindings::VRDisplayBinding; use crate::dom::bindings::codegen::Bindings::VRDisplayBinding::VRDisplayMethods; use crate::dom::bindings::codegen::Bindings::VRDisplayBinding::VREye; use crate::dom::bindings::codegen::Bindings::VRLayerBinding::VRLayer; use crate::dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLRenderingContextMethods; use crate::dom::bindings::codegen::Bindings::WindowBinding::FrameRequestCallback; use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowMethods; use crate::dom::bindings::codegen::Bindings::XRRenderStateBinding::XRRenderStateInit; use crate::dom::bindings::error::Error; use crate::dom::bindings::inheritance::Castable; use crate::dom::bindings::num::Finite; use crate::dom::bindings::refcounted::{Trusted, TrustedPromise}; use crate::dom::bindings::reflector::{reflect_dom_object, DomObject}; use crate::dom::bindings::root::{DomRoot, MutDom, MutNullableDom}; use crate::dom::bindings::str::DOMString; use crate::dom::event::Event; use crate::dom::eventtarget::EventTarget; use crate::dom::globalscope::GlobalScope; use crate::dom::promise::Promise; use crate::dom::vrdisplaycapabilities::VRDisplayCapabilities; use crate::dom::vrdisplayevent::VRDisplayEvent; use crate::dom::vreyeparameters::VREyeParameters; use crate::dom::vrframedata::VRFrameData; use crate::dom::vrpose::VRPose; use crate::dom::vrstageparameters::VRStageParameters; use crate::dom::webglrenderingcontext::{WebGLMessageSender, WebGLRenderingContext}; use crate::script_runtime::CommonScriptMsg; use crate::script_runtime::ScriptThreadEventCategory::WebVREvent; use crate::task_source::{TaskSource, TaskSourceName}; use canvas_traits::webgl::{webgl_channel, WebGLReceiver, WebVRCommand}; use crossbeam_channel::{unbounded, Sender}; use dom_struct::dom_struct; use ipc_channel::ipc::IpcSender; use profile_traits::ipc; use std::cell::Cell; use std::mem; use std::rc::Rc; use std::thread; use webvr_traits::{WebVRDisplayData, WebVRDisplayEvent, WebVRFrameData, WebVRPoseInformation}; use webvr_traits::{WebVRLayer, WebVRMsg}; #[dom_struct] pub struct VRDisplay { eventtarget: EventTarget, #[ignore_malloc_size_of = "Defined in rust-webvr"] display: DomRefCell<WebVRDisplayData>, depth_near: Cell<f64>, depth_far: Cell<f64>, presenting: Cell<bool>, has_raf_thread: Cell<bool>, left_eye_params: MutDom<VREyeParameters>, right_eye_params: MutDom<VREyeParameters>, capabilities: MutDom<VRDisplayCapabilities>, stage_params: MutNullableDom<VRStageParameters>, #[ignore_malloc_size_of = "Defined in rust-webvr"] frame_data: DomRefCell<WebVRFrameData>, #[ignore_malloc_size_of = "Defined in rust-webvr"] layer: DomRefCell<WebVRLayer>, layer_ctx: MutNullableDom<WebGLRenderingContext>, #[ignore_malloc_size_of = "Defined in rust-webvr"] next_raf_id: Cell<u32>, /// List of request animation frame callbacks #[ignore_malloc_size_of = "closures are hard"] raf_callback_list: DomRefCell<Vec<(u32, 
Option<Rc<FrameRequestCallback>>)>>, /// When there isn't any layer_ctx the RAF thread needs to be "woken up" raf_wakeup_sender: DomRefCell<Option<Sender<()>>>, #[ignore_malloc_size_of = "Rc is hard"] pending_renderstate_updates: DomRefCell<Vec<(XRRenderStateInit, Rc<Promise>)>>, // Compositor VRFrameData synchonization frame_data_status: Cell<VRFrameDataStatus>, #[ignore_malloc_size_of = "closures are hard"] frame_data_receiver: DomRefCell<Option<WebGLReceiver<Result<WebVRPoseInformation, ()>>>>, running_display_raf: Cell<bool>, paused: Cell<bool>, stopped_on_pause: Cell<bool>, } unsafe_no_jsmanaged_fields!(WebVRDisplayData); unsafe_no_jsmanaged_fields!(WebVRFrameData); unsafe_no_jsmanaged_fields!(WebVRLayer); unsafe_no_jsmanaged_fields!(VRFrameDataStatus); #[derive(Clone, Copy, Eq, MallocSizeOf, PartialEq)] enum VRFrameDataStatus { Waiting, Synced, Exit, } #[derive(Clone, MallocSizeOf)] struct VRRAFUpdate { depth_near: f64, depth_far: f64, /// WebGL API sender api_sender: Option<WebGLMessageSender>, /// Number uniquely identifying the WebGL context /// so that we may setup/tear down VR compositors as things change context_id: usize, } type VRRAFUpdateSender = Sender<Result<VRRAFUpdate, ()>>; impl VRDisplay { fn new_inherited(global: &GlobalScope, display: WebVRDisplayData) -> VRDisplay { let stage = match display.stage_parameters { Some(ref params) => Some(VRStageParameters::new(params.clone(), &global)), None => None, }; VRDisplay { eventtarget: EventTarget::new_inherited(), display: DomRefCell::new(display.clone()), depth_near: Cell::new(0.01), depth_far: Cell::new(10000.0), presenting: Cell::new(false), has_raf_thread: Cell::new(false), left_eye_params: MutDom::new(&*VREyeParameters::new( display.left_eye_parameters.clone(), &global, )), right_eye_params: MutDom::new(&*VREyeParameters::new( display.right_eye_parameters.clone(), &global, )), capabilities: MutDom::new(&*VRDisplayCapabilities::new( display.capabilities.clone(), &global, )), stage_params: MutNullableDom::new(stage.as_deref()), frame_data: DomRefCell::new(Default::default()), layer: DomRefCell::new(Default::default()), layer_ctx: MutNullableDom::default(), next_raf_id: Cell::new(1), raf_callback_list: DomRefCell::new(vec![]), raf_wakeup_sender: DomRefCell::new(None), pending_renderstate_updates: DomRefCell::new(vec![]), frame_data_status: Cell::new(VRFrameDataStatus::Waiting), frame_data_receiver: DomRefCell::new(None), running_display_raf: Cell::new(false), // Some VR implementations (e.g. Daydream) can be paused in some life cycle situations // such as showing and hiding the controller pairing screen. paused: Cell::new(false), // This flag is set when the Display was presenting when it received a VR Pause event. // When the VR Resume event is received and the flag is set, VR presentation automatically restarts. 
stopped_on_pause: Cell::new(false), } } pub fn new(global: &GlobalScope, display: WebVRDisplayData) -> DomRoot<VRDisplay> { reflect_dom_object( Box::new(VRDisplay::new_inherited(&global, display)), global, VRDisplayBinding::Wrap, ) } } impl Drop for VRDisplay { fn drop(&mut self) { if self.presenting.get() { self.force_stop_present(); } } } impl VRDisplayMethods for VRDisplay { // https://w3c.github.io/webvr/#dom-vrdisplay-isconnected fn IsConnected(&self) -> bool { self.display.borrow().connected } // https://w3c.github.io/webvr/#dom-vrdisplay-ispresenting fn IsPresenting(&self) -> bool { self.presenting.get() } // https://w3c.github.io/webvr/#dom-vrdisplay-capabilities fn Capabilities(&self) -> DomRoot<VRDisplayCapabilities> { DomRoot::from_ref(&*self.capabilities.get()) } // https://w3c.github.io/webvr/#dom-vrdisplay-stageparameters fn GetStageParameters(&self) -> Option<DomRoot<VRStageParameters>> { self.stage_params.get().map(|s| DomRoot::from_ref(&*s)) } // https://w3c.github.io/webvr/#dom-vrdisplay-geteyeparameters fn GetEyeParameters(&self, eye: VREye) -> DomRoot<VREyeParameters> { match eye { VREye::Left => DomRoot::from_ref(&*self.left_eye_params.get()), VREye::Right => DomRoot::from_ref(&*self.right_eye_params.get()), } } // https://w3c.github.io/webvr/#dom-vrdisplay-displayid fn DisplayId(&self) -> u32 { self.display.borrow().display_id } // https://w3c.github.io/webvr/#dom-vrdisplay-displayname fn DisplayName(&self) -> DOMString { DOMString::from(self.display.borrow().display_name.clone()) } // https://w3c.github.io/webvr/#dom-vrdisplay-getframedata-framedata-framedata fn GetFrameData(&self, frameData: &VRFrameData) -> bool { // If presenting we use a synced data with compositor for the whole frame. // Frame data is only synced with compositor when GetFrameData is called from // inside the VRDisplay.requestAnimationFrame. This is checked using the running_display_raf property. // This check avoids data race conditions when calling GetFrameData from outside of the // VRDisplay.requestAnimationFrame callbacks and fixes a possible deadlock during the interval // when the requestAnimationFrame is moved from window to VRDisplay. 
if self.presenting.get() && self.running_display_raf.get() { if self.frame_data_status.get() == VRFrameDataStatus::Waiting { self.sync_frame_data(); } frameData.update(&self.frame_data.borrow()); return true; } // If not presenting we fetch inmediante VRFrameData let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); self.webvr_thread() .send(WebVRMsg::GetFrameData( self.global().pipeline_id(), self.DisplayId(), self.depth_near.get(), self.depth_far.get(), sender, )) .unwrap(); return match receiver.recv().unwrap() { Ok(data) => { frameData.update(&data); true }, Err(e) => { error!("WebVR::GetFrameData: {:?}", e); false }, }; } // https://w3c.github.io/webvr/#dom-vrdisplay-getpose fn GetPose(&self) -> DomRoot<VRPose> { VRPose::new(&self.global(), &self.frame_data.borrow().pose) } // https://w3c.github.io/webvr/#dom-vrdisplay-resetpose fn ResetPose(&self) { let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); self.webvr_thread() .send(WebVRMsg::ResetPose( self.global().pipeline_id(), self.DisplayId(), sender, )) .unwrap(); if let Ok(data) = receiver.recv().unwrap() { // Some VRDisplay data might change after calling ResetPose() *self.display.borrow_mut() = data; } } // https://w3c.github.io/webvr/#dom-vrdisplay-depthnear fn DepthNear(&self) -> Finite<f64> { Finite::wrap(self.depth_near.get()) } // https://w3c.github.io/webvr/#dom-vrdisplay-depthnear fn SetDepthNear(&self, value: Finite<f64>) { self.depth_near.set(*value); } // https://w3c.github.io/webvr/#dom-vrdisplay-depthfar fn DepthFar(&self) -> Finite<f64> { Finite::wrap(self.depth_far.get()) } // https://w3c.github.io/webvr/#dom-vrdisplay-depthfar fn SetDepthFar(&self, value: Finite<f64>) { self.depth_far.set(*value); } // https://w3c.github.io/webvr/#dom-vrdisplay-requestanimationframe fn RequestAnimationFrame(&self, callback: Rc<FrameRequestCallback>) -> u32 { if self.presenting.get() { let raf_id = self.next_raf_id.get(); self.next_raf_id.set(raf_id + 1); self.raf_callback_list .borrow_mut() .push((raf_id, Some(callback))); raf_id } else { // WebVR spec: When a VRDisplay is not presenting it should // fallback to window.requestAnimationFrame. self.global().as_window().RequestAnimationFrame(callback) } } // https://w3c.github.io/webvr/#dom-vrdisplay-cancelanimationframe fn CancelAnimationFrame(&self, handle: u32) { if self.presenting.get() { let mut list = self.raf_callback_list.borrow_mut(); if let Some(pair) = list.iter_mut().find(|pair| pair.0 == handle) { pair.1 = None; } } else { // WebVR spec: When a VRDisplay is not presenting it should // fallback to window.cancelAnimationFrame. self.global().as_window().CancelAnimationFrame(handle); } } // https://w3c.github.io/webvr/#dom-vrdisplay-requestpresent fn RequestPresent(&self, layers: Vec<VRLayer>, comp: InCompartment) -> Rc<Promise> { let promise = Promise::new_in_current_compartment(&self.global(), comp); // TODO: WebVR spec: this method must be called in response to a user gesture // WebVR spec: If canPresent is false the promise MUST be rejected if !self.display.borrow().capabilities.can_present { let msg = "VRDisplay canPresent is false".to_string(); promise.reject_native(&msg); return promise; } // Current WebVRSpec only allows 1 VRLayer if the VRDevice can present. // Future revisions of this spec may allow multiple layers to enable more complex rendering effects // such as compositing WebGL and DOM elements together. // That functionality is not allowed by this revision of the spec. 
if layers.len() != 1 { let msg = "The number of layers must be 1".to_string(); promise.reject_native(&msg); return promise; } // Parse and validate received VRLayer let layer = validate_layer(&layers[0]); let layer_bounds; let layer_ctx; match layer { Ok((bounds, ctx)) => { layer_bounds = bounds; layer_ctx = ctx; }, Err(msg) => { let msg = msg.to_string(); promise.reject_native(&msg); return promise; }, }; // WebVR spec: Repeat calls while already presenting will update the VRLayers being displayed. if self.presenting.get() { *self.layer.borrow_mut() = layer_bounds; self.layer_ctx.set(Some(&layer_ctx)); promise.resolve_native(&()); return promise; } let xr = self.global().as_window().Navigator().Xr(); if xr.pending_or_active_session() { // WebVR spec doesn't mandate anything here, however // the WebXR spec expects there to be only one immersive XR session at a time, // and WebVR is deprecated promise.reject_error(Error::InvalidState); return promise; } self.request_present(layer_bounds, Some(&layer_ctx), Some(promise.clone()), |p| { p.resolve_native(&()) }); promise } // https://w3c.github.io/webvr/#dom-vrdisplay-exitpresent fn ExitPresent(&self, comp: InCompartment) -> Rc<Promise> { let promise = Promise::new_in_current_compartment(&self.global(), comp); // WebVR spec: If the VRDisplay is not presenting the promise MUST be rejected. if !self.presenting.get() { let msg = "VRDisplay is not presenting".to_string(); promise.reject_native(&msg); return promise; } // Exit present let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); self.webvr_thread() .send(WebVRMsg::ExitPresent( self.global().pipeline_id(), self.display.borrow().display_id, Some(sender), )) .unwrap(); match receiver.recv().unwrap() { Ok(()) => { self.stop_present(); promise.resolve_native(&()); }, Err(e) => { promise.reject_native(&e); }, } promise } // https://w3c.github.io/webvr/#dom-vrdisplay-submitframe fn SubmitFrame(&self) { if !self.presenting.get() { warn!("VRDisplay not presenting"); return; } let display_id = self.display.borrow().display_id; let layer = self.layer.borrow(); let msg = WebVRCommand::SubmitFrame(display_id, layer.left_bounds, layer.right_bounds); self.layer_ctx .get() .expect("SubmitFrame can only be called when there is a webgl layer") .send_vr_command(msg); } // https://w3c.github.io/webvr/spec/1.1/#dom-vrdisplay-getlayers fn GetLayers(&self) -> Vec<VRLayer> { // WebVR spec: MUST return an empty array if the VRDisplay is not currently presenting if !self.presenting.get() { return Vec::new(); } let layer = self.layer.borrow(); vec![VRLayer { leftBounds: Some(bounds_to_vec(&layer.left_bounds)), rightBounds: Some(bounds_to_vec(&layer.right_bounds)), source: self.layer_ctx.get().map(|ctx| ctx.Canvas()), }] } } impl VRDisplay { fn webvr_thread(&self) -> IpcSender<WebVRMsg> { self.global() .as_window() .webvr_thread() .expect("Shouldn't arrive here with WebVR disabled") } pub fn update_display(&self, display: &WebVRDisplayData) {<|fim▁hole|> if let Some(ref stage) = display.stage_parameters { if self.stage_params.get().is_none() { let params = Some(VRStageParameters::new(stage.clone(), &self.global())); self.stage_params.set(params.as_deref()); } else { self.stage_params.get().unwrap().update(&stage); } } else { self.stage_params.set(None); } } pub fn request_present<F>( &self, layer_bounds: WebVRLayer, ctx: Option<&WebGLRenderingContext>, promise: Option<Rc<Promise>>, resolve: F, ) where F: FnOnce(Rc<Promise>) + Send + 'static, { // Request Present let (sender, 
receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); self.webvr_thread() .send(WebVRMsg::RequestPresent( self.global().pipeline_id(), self.display.borrow().display_id, sender, )) .unwrap(); let promise = promise.map(TrustedPromise::new); let this = Trusted::new(self); let ctx = ctx.map(|c| Trusted::new(c)); let global = self.global(); let window = global.as_window(); let (task_source, canceller) = window .task_manager() .dom_manipulation_task_source_with_canceller(); thread::spawn(move || { let recv = receiver.recv().unwrap(); let _ = task_source.queue_with_canceller( task!(vr_presenting: move || { let this = this.root(); let promise = promise.map(|p| p.root()); let ctx = ctx.map(|c| c.root()); match recv { Ok(()) => { *this.layer.borrow_mut() = layer_bounds; this.layer_ctx.set(ctx.as_deref()); this.init_present(); promise.map(resolve); }, Err(e) => { promise.map(|p| p.reject_native(&e)); }, } }), &canceller, ); }); } pub fn handle_webvr_event(&self, event: &WebVRDisplayEvent) { match *event { WebVRDisplayEvent::Connect(ref display) => { self.update_display(&display); }, WebVRDisplayEvent::Disconnect(_id) => { self.display.borrow_mut().connected = false; }, WebVRDisplayEvent::Activate(ref display, _) | WebVRDisplayEvent::Deactivate(ref display, _) | WebVRDisplayEvent::Blur(ref display) | WebVRDisplayEvent::Focus(ref display) => { self.update_display(&display); self.notify_event(&event); }, WebVRDisplayEvent::PresentChange(ref display, presenting) => { self.update_display(&display); self.presenting.set(presenting); self.notify_event(&event); }, WebVRDisplayEvent::Change(ref display) => { // Change event doesn't exist in WebVR spec. // So we update display data but don't notify JS. self.update_display(&display); }, WebVRDisplayEvent::Pause(_) => { if self.paused.get() { return; } self.paused.set(true); if self.presenting.get() { self.stop_present(); self.stopped_on_pause.set(true); } }, WebVRDisplayEvent::Resume(_) => { self.paused.set(false); if self.stopped_on_pause.get() { self.stopped_on_pause.set(false); self.init_present(); } }, WebVRDisplayEvent::Exit(_) => { self.stopped_on_pause.set(false); if self.presenting.get() { self.stop_present(); } }, }; } fn notify_event(&self, event: &WebVRDisplayEvent) { let root = DomRoot::from_ref(&*self); let event = VRDisplayEvent::new_from_webvr(&self.global(), &root, &event); event .upcast::<Event>() .fire(self.global().upcast::<EventTarget>()); } fn api_sender(&self) -> Option<WebGLMessageSender> { self.layer_ctx.get().map(|c| c.webgl_sender()) } fn context_id(&self) -> usize { self.layer_ctx .get() .map(|c| &*c as *const WebGLRenderingContext as usize) .unwrap_or(0) } fn vr_raf_update(&self) -> VRRAFUpdate { VRRAFUpdate { depth_near: self.depth_near.get(), depth_far: self.depth_far.get(), api_sender: self.api_sender(), context_id: self.context_id(), } } fn init_present(&self) { self.presenting.set(true); if self.has_raf_thread.get() { return; } self.has_raf_thread.set(true); let (sync_sender, sync_receiver) = webgl_channel().unwrap(); *self.frame_data_receiver.borrow_mut() = Some(sync_receiver); let display_id = self.display.borrow().display_id; let mut api_sender = self.api_sender(); let mut context_id = self.context_id(); let js_sender = self.global().script_chan(); let address = Trusted::new(&*self); let mut near = self.depth_near.get(); let mut far = self.depth_far.get(); let pipeline_id = self.global().pipeline_id(); let (raf_sender, raf_receiver) = unbounded(); let (wakeup_sender, wakeup_receiver) = unbounded(); 
*self.raf_wakeup_sender.borrow_mut() = Some(wakeup_sender); // The render loop at native headset frame rate is implemented using a dedicated thread. // Every loop iteration syncs pose data with the HMD, submits the pixels to the display and waits for Vsync. // Both the requestAnimationFrame call of a VRDisplay in the JavaScript thread and the VRSyncPoses call // in the Webrender thread are executed in parallel. This allows to get some JavaScript code executed ahead. // while the render thread is syncing the VRFrameData to be used for the current frame. // This thread runs until the user calls ExitPresent, the tab is closed or some unexpected error happened. thread::Builder::new() .name("WebVR_RAF".into()) .spawn(move || { // Initialize compositor if let Some(ref api_sender) = api_sender { api_sender .send_vr(WebVRCommand::Create(display_id)) .unwrap(); } loop { if let Some(ref api_sender) = api_sender { // Run RAF callbacks on JavaScript thread let this = address.clone(); let sender = raf_sender.clone(); let task = Box::new(task!(handle_vrdisplay_raf: move || { this.root().handle_raf(&sender); })); // NOTE: WebVR spec doesn't specify what task source we should use. Is // dom-manipulation a good choice long term? js_sender .send(CommonScriptMsg::Task( WebVREvent, task, Some(pipeline_id), TaskSourceName::DOMManipulation, )) .unwrap(); // Run Sync Poses in parallell on Render thread let msg = WebVRCommand::SyncPoses( display_id, near, far, false, sync_sender.clone(), ); api_sender.send_vr(msg).unwrap(); } else { let _ = wakeup_receiver.recv(); let sender = raf_sender.clone(); let this = address.clone(); let task = Box::new(task!(flush_renderstate_queue: move || { let this = this.root(); sender.send(Ok(this.vr_raf_update())).unwrap(); })); js_sender .send(CommonScriptMsg::Task( WebVREvent, task, Some(pipeline_id), TaskSourceName::DOMManipulation, )) .unwrap(); } // Wait until both SyncPoses & RAF ends if let Ok(update) = raf_receiver.recv().unwrap() { near = update.depth_near; far = update.depth_far; if update.context_id != context_id { if let Some(ref api_sender) = update.api_sender { api_sender .send_vr(WebVRCommand::Create(display_id)) .unwrap(); } if let Some(ref api_sender) = api_sender { // shut down old vr compositor api_sender .send_vr(WebVRCommand::Release(display_id)) .unwrap(); } context_id = update.context_id; } api_sender = update.api_sender; } else { // Stop thread // ExitPresent called or some error happened return; } } }) .expect("Thread spawning failed"); } fn stop_present(&self) { self.presenting.set(false); *self.frame_data_receiver.borrow_mut() = None; self.has_raf_thread.set(false); if let Some(api_sender) = self.api_sender() { let display_id = self.display.borrow().display_id; api_sender .send_vr(WebVRCommand::Release(display_id)) .unwrap(); } } // Only called when the JSContext is destroyed while presenting. // In this case we don't want to wait for WebVR Thread response. 
fn force_stop_present(&self) { self.webvr_thread() .send(WebVRMsg::ExitPresent( self.global().pipeline_id(), self.display.borrow().display_id, None, )) .unwrap(); self.stop_present(); } fn sync_frame_data(&self) { let status = if let Some(receiver) = self.frame_data_receiver.borrow().as_ref() { match receiver.recv().unwrap() { Ok(pose) => { *self.frame_data.borrow_mut() = pose.frame.block(); VRFrameDataStatus::Synced }, Err(()) => VRFrameDataStatus::Exit, } } else { VRFrameDataStatus::Exit }; self.frame_data_status.set(status); } fn handle_raf(&self, end_sender: &VRRAFUpdateSender) { self.frame_data_status.set(VRFrameDataStatus::Waiting); let now = self.global().as_window().Performance().Now(); self.running_display_raf.set(true); let mut callbacks = mem::replace(&mut *self.raf_callback_list.borrow_mut(), vec![]); // Call registered VRDisplay.requestAnimationFrame callbacks. for (_, callback) in callbacks.drain(..) { if let Some(callback) = callback { let _ = callback.Call__(Finite::wrap(*now), ExceptionHandling::Report); } } self.running_display_raf.set(false); if self.frame_data_status.get() == VRFrameDataStatus::Waiting { // User didn't call getFrameData while presenting. // We automatically reads the pending VRFrameData to avoid overflowing the IPC-Channel buffers. // Show a warning as the WebVR Spec recommends. warn!("WebVR: You should call GetFrameData while presenting"); self.sync_frame_data(); } match self.frame_data_status.get() { VRFrameDataStatus::Synced => { // Sync succeeded. Notify RAF thread. end_sender.send(Ok(self.vr_raf_update())).unwrap(); }, VRFrameDataStatus::Exit | VRFrameDataStatus::Waiting => { // ExitPresent called or some error ocurred. // Notify VRDisplay RAF thread to stop. end_sender.send(Err(())).unwrap(); }, } } } // WebVR Spec: If the number of values in the leftBounds/rightBounds arrays // is not 0 or 4 for any of the passed layers the promise is rejected fn parse_bounds(src: &Option<Vec<Finite<f32>>>, dst: &mut [f32; 4]) -> Result<(), &'static str> { match *src { Some(ref values) => { if values.len() == 0 { return Ok(()); } if values.len() != 4 { return Err( "The number of values in the leftBounds/rightBounds arrays must be 0 or 4", ); } for i in 0..4 { dst[i] = *values[i]; } Ok(()) }, None => Ok(()), } } fn validate_layer( layer: &VRLayer, ) -> Result<(WebVRLayer, DomRoot<WebGLRenderingContext>), &'static str> { let ctx = layer .source .as_ref() .map(|ref s| s.get_base_webgl_context()) .unwrap_or(None); if let Some(ctx) = ctx { let mut data = WebVRLayer::default(); parse_bounds(&layer.leftBounds, &mut data.left_bounds)?; parse_bounds(&layer.rightBounds, &mut data.right_bounds)?; Ok((data, ctx)) } else { Err("VRLayer source must be a WebGL Context") } } fn bounds_to_vec(src: &[f32; 4]) -> Vec<Finite<f32>> { vec![ Finite::wrap(src[0]), Finite::wrap(src[1]), Finite::wrap(src[2]), Finite::wrap(src[3]), ] }<|fim▁end|>
*self.display.borrow_mut() = display.clone();
<|file_name|>box.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ <%namespace name="helpers" file="/helpers.mako.rs" /> <% from data import Keyword, Method, to_rust_ident, to_camel_case%> <% data.new_style_struct("Box", inherited=False, gecko_name="Display") %> // TODO(SimonSapin): don't parse `inline-table`, since we don't support it // // We allow "display" to apply to placeholders because we need to make the // placeholder pseudo-element an inline-block in the UA stylesheet in Gecko. ${helpers.predefined_type( "display", "Display", "computed::Display::inline()", initial_specified_value="specified::Display::inline()", animation_value_type="discrete", needs_context=False, flags="APPLIES_TO_PLACEHOLDER", spec="https://drafts.csswg.org/css-display/#propdef-display", servo_restyle_damage="rebuild_and_reflow" )} // FIXME(emilio): Listing all the display values here is very unfortunate, we should teach C++ to use the // Rust enum directly, or generate the conversions to `StyleDisplay`. ${helpers.gecko_keyword_conversion( Keyword('display', """ inline block inline-block table inline-table table-row-group table-header-group table-footer-group table-row table-column-group table-column table-cell table-caption list-item none flex inline-flex grid inline-grid ruby ruby-base ruby-base-container ruby-text ruby-text-container contents flow-root -webkit-box -webkit-inline-box -moz-box -moz-inline-box -moz-grid -moz-inline-grid -moz-grid-group -moz-grid-line -moz-stack -moz-inline-stack -moz-deck -moz-popup -moz-groupbox """, gecko_enum_prefix='StyleDisplay', gecko_strip_moz_prefix=False), type="::values::specified::Display" )} ${helpers.single_keyword("-moz-top-layer", "none top", gecko_constant_prefix="NS_STYLE_TOP_LAYER", gecko_ffi_name="mTopLayer", products="gecko", animation_value_type="none", enabled_in="ua", spec="Internal (not web-exposed)")} ${helpers.single_keyword("position", "static absolute relative fixed sticky", animation_value_type="discrete", flags="CREATES_STACKING_CONTEXT ABSPOS_CB", spec="https://drafts.csswg.org/css-position/#position-property", servo_restyle_damage="rebuild_and_reflow")} <%helpers:single_keyword name="float" values="none left right" // https://drafts.csswg.org/css-logical-props/#float-clear extra_specified="inline-start inline-end" needs_conversion="True" animation_value_type="discrete" gecko_enum_prefix="StyleFloat" gecko_inexhaustive="True" gecko_ffi_name="mFloat" flags="APPLIES_TO_FIRST_LETTER" spec="https://drafts.csswg.org/css-box/#propdef-float" servo_restyle_damage="rebuild_and_reflow" > impl ToComputedValue for SpecifiedValue { type ComputedValue = computed_value::T; #[inline] fn to_computed_value(&self, context: &Context) -> computed_value::T { let ltr = context.style().writing_mode.is_bidi_ltr(); // https://drafts.csswg.org/css-logical-props/#float-clear match *self { SpecifiedValue::InlineStart => { context.rule_cache_conditions.borrow_mut() .set_writing_mode_dependency(context.builder.writing_mode); if ltr { computed_value::T::Left } else { computed_value::T::Right } } SpecifiedValue::InlineEnd => { context.rule_cache_conditions.borrow_mut() .set_writing_mode_dependency(context.builder.writing_mode); if ltr { computed_value::T::Right } else { computed_value::T::Left } } % for value in "None Left Right".split(): SpecifiedValue::${value} => 
computed_value::T::${value}, % endfor } } #[inline] fn from_computed_value(computed: &computed_value::T) -> SpecifiedValue { match *computed { % for value in "None Left Right".split(): computed_value::T::${value} => SpecifiedValue::${value}, % endfor } } } </%helpers:single_keyword> <%helpers:single_keyword name="clear" values="none left right both" // https://drafts.csswg.org/css-logical-props/#float-clear extra_specified="inline-start inline-end" needs_conversion="True" gecko_inexhaustive="True" animation_value_type="discrete" gecko_enum_prefix="StyleClear" gecko_ffi_name="mBreakType" spec="https://drafts.csswg.org/css-box/#propdef-clear" servo_restyle_damage="rebuild_and_reflow" > impl ToComputedValue for SpecifiedValue { type ComputedValue = computed_value::T; #[inline] fn to_computed_value(&self, context: &Context) -> computed_value::T { let ltr = context.style().writing_mode.is_bidi_ltr(); // https://drafts.csswg.org/css-logical-props/#float-clear match *self { SpecifiedValue::InlineStart => { context.rule_cache_conditions.borrow_mut() .set_writing_mode_dependency(context.builder.writing_mode); if ltr { computed_value::T::Left } else { computed_value::T::Right } } SpecifiedValue::InlineEnd => { context.rule_cache_conditions.borrow_mut() .set_writing_mode_dependency(context.builder.writing_mode); if ltr { computed_value::T::Right } else { computed_value::T::Left } } % for value in "None Left Right Both".split(): SpecifiedValue::${value} => computed_value::T::${value}, % endfor } } #[inline] fn from_computed_value(computed: &computed_value::T) -> SpecifiedValue { match *computed { % for value in "None Left Right Both".split(): computed_value::T::${value} => SpecifiedValue::${value}, % endfor } } } </%helpers:single_keyword> ${helpers.predefined_type( "vertical-align", "VerticalAlign", "computed::VerticalAlign::baseline()", animation_value_type="ComputedValue", flags="APPLIES_TO_FIRST_LETTER APPLIES_TO_FIRST_LINE APPLIES_TO_PLACEHOLDER", spec="https://www.w3.org/TR/CSS2/visudet.html#propdef-vertical-align", servo_restyle_damage = "reflow" )} // CSS 2.1, Section 11 - Visual effects ${helpers.single_keyword("-servo-overflow-clip-box", "padding-box content-box", products="servo", animation_value_type="none", enabled_in="ua", spec="Internal, not web-exposed, \ may be standardized in the future (https://developer.mozilla.org/en-US/docs/Web/CSS/overflow-clip-box)")} % for direction in ["inline", "block"]: ${helpers.predefined_type( "overflow-clip-box-" + direction, "OverflowClipBox", "computed::OverflowClipBox::PaddingBox", products="gecko", enabled_in="ua", needs_context=False, flags="APPLIES_TO_PLACEHOLDER", gecko_pref="layout.css.overflow-clip-box.enabled", animation_value_type="discrete", spec="Internal, may be standardized in the future: \ https://developer.mozilla.org/en-US/docs/Web/CSS/overflow-clip-box", )} % endfor <% overflow_custom_consts = { "-moz-hidden-unscrollable": "CLIP" } %> // FIXME(pcwalton, #2742): Implement scrolling for `scroll` and `auto`. // // We allow it to apply to placeholders for UA sheets, which set it !important. ${helpers.single_keyword("overflow-x", "visible hidden scroll auto", animation_value_type="discrete", extra_gecko_values="-moz-hidden-unscrollable", custom_consts=overflow_custom_consts, gecko_constant_prefix="NS_STYLE_OVERFLOW", flags="APPLIES_TO_PLACEHOLDER", spec="https://drafts.csswg.org/css-overflow/#propdef-overflow-x", servo_restyle_damage = "reflow")} // FIXME(pcwalton, #2742): Implement scrolling for `scroll` and `auto`. 
// // We allow it to apply to placeholders for UA sheets, which set it !important. <%helpers:longhand name="overflow-y" animation_value_type="discrete" flags="APPLIES_TO_PLACEHOLDER", spec="https://drafts.csswg.org/css-overflow/#propdef-overflow-y" servo_restyle_damage = "reflow"> pub use super::overflow_x::{SpecifiedValue, parse, get_initial_value, computed_value}; </%helpers:longhand> <% transition_extra_prefixes = "moz:layout.css.prefixes.transitions webkit" %> ${helpers.predefined_type("transition-duration", "Time", "computed::Time::zero()", initial_specified_value="specified::Time::zero()", parse_method="parse_non_negative", vector=True, need_index=True, animation_value_type="none", extra_prefixes=transition_extra_prefixes, spec="https://drafts.csswg.org/css-transitions/#propdef-transition-duration")} ${helpers.predefined_type("transition-timing-function", "TimingFunction", "computed::TimingFunction::ease()", initial_specified_value="specified::TimingFunction::ease()", vector=True, need_index=True, animation_value_type="none", extra_prefixes=transition_extra_prefixes, spec="https://drafts.csswg.org/css-transitions/#propdef-transition-timing-function")} ${helpers.predefined_type( "transition-property", "TransitionProperty", "computed::TransitionProperty::all()", initial_specified_value="specified::TransitionProperty::all()", vector=True, allow_empty="NotInitial", need_index=True, needs_context=False, animation_value_type="none", extra_prefixes=transition_extra_prefixes, spec="https://drafts.csswg.org/css-transitions/#propdef-transition-property", )} ${helpers.predefined_type("transition-delay", "Time", "computed::Time::zero()", initial_specified_value="specified::Time::zero()", vector=True, need_index=True, animation_value_type="none", extra_prefixes=transition_extra_prefixes, spec="https://drafts.csswg.org/css-transitions/#propdef-transition-delay")} <% animation_extra_prefixes = "moz:layout.css.prefixes.animations webkit" %> ${helpers.predefined_type( "animation-name", "AnimationName", "computed::AnimationName::none()", initial_specified_value="specified::AnimationName::none()", vector=True, need_index=True, animation_value_type="none", extra_prefixes=animation_extra_prefixes, allowed_in_keyframe_block=False, spec="https://drafts.csswg.org/css-animations/#propdef-animation-name", )} ${helpers.predefined_type("animation-duration", "Time", "computed::Time::zero()", initial_specified_value="specified::Time::zero()", parse_method="parse_non_negative", vector=True, need_index=True, animation_value_type="none", extra_prefixes=animation_extra_prefixes, spec="https://drafts.csswg.org/css-transitions/#propdef-transition-duration")} // animation-timing-function is the exception to the rule for allowed_in_keyframe_block: // https://drafts.csswg.org/css-animations/#keyframes ${helpers.predefined_type("animation-timing-function", "TimingFunction", "computed::TimingFunction::ease()", initial_specified_value="specified::TimingFunction::ease()", vector=True, need_index=True, animation_value_type="none", extra_prefixes=animation_extra_prefixes, allowed_in_keyframe_block=True, spec="https://drafts.csswg.org/css-transitions/#propdef-animation-timing-function")} ${helpers.predefined_type( "animation-iteration-count", "AnimationIterationCount", "computed::AnimationIterationCount::one()", initial_specified_value="specified::AnimationIterationCount::one()", vector=True, need_index=True, animation_value_type="none", extra_prefixes=animation_extra_prefixes, allowed_in_keyframe_block=False, 
spec="https://drafts.csswg.org/css-animations/#propdef-animation-iteration-count", )} <% animation_direction_custom_consts = { "alternate-reverse": "Alternate_reverse" } %> ${helpers.single_keyword("animation-direction", "normal reverse alternate alternate-reverse", need_index=True, animation_value_type="none", vector=True, gecko_enum_prefix="PlaybackDirection", custom_consts=animation_direction_custom_consts, extra_prefixes=animation_extra_prefixes, spec="https://drafts.csswg.org/css-animations/#propdef-animation-direction", allowed_in_keyframe_block=False)} ${helpers.single_keyword("animation-play-state", "running paused", need_index=True, animation_value_type="none", vector=True, extra_prefixes=animation_extra_prefixes, spec="https://drafts.csswg.org/css-animations/#propdef-animation-play-state", allowed_in_keyframe_block=False)} ${helpers.single_keyword("animation-fill-mode", "none forwards backwards both", need_index=True, animation_value_type="none", vector=True, gecko_enum_prefix="FillMode", extra_prefixes=animation_extra_prefixes, spec="https://drafts.csswg.org/css-animations/#propdef-animation-fill-mode", allowed_in_keyframe_block=False)} ${helpers.predefined_type("animation-delay", "Time",<|fim▁hole|> initial_specified_value="specified::Time::zero()", vector=True, need_index=True, animation_value_type="none", extra_prefixes=animation_extra_prefixes, spec="https://drafts.csswg.org/css-animations/#propdef-animation-delay", allowed_in_keyframe_block=False)} % for axis in ["x", "y"]: ${helpers.predefined_type( "scroll-snap-points-" + axis, "ScrollSnapPoint", "computed::ScrollSnapPoint::none()", animation_value_type="discrete", gecko_pref="layout.css.scroll-snap.enabled", products="gecko", spec="Nonstandard (https://www.w3.org/TR/2015/WD-css-snappoints-1-20150326/#scroll-snap-points)", )} % endfor ${helpers.predefined_type("scroll-snap-destination", "Position", "computed::Position::zero()", products="gecko", gecko_pref="layout.css.scroll-snap.enabled", boxed=True, spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/scroll-snap-destination)", animation_value_type="discrete")} ${helpers.predefined_type( "scroll-snap-coordinate", "Position", "computed::Position::zero()", vector=True, products="gecko", gecko_pref="layout.css.scroll-snap.enabled", spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/scroll-snap-destination)", animation_value_type="discrete", allow_empty="NotInitial" )} <% transform_extra_prefixes = "moz:layout.css.prefixes.transforms webkit" %> ${helpers.predefined_type("transform", "Transform", "generics::transform::Transform::none()", extra_prefixes=transform_extra_prefixes, animation_value_type="ComputedValue", gecko_ffi_name="mSpecifiedTransform", flags="CREATES_STACKING_CONTEXT FIXPOS_CB", spec="https://drafts.csswg.org/css-transforms/#propdef-transform", servo_restyle_damage = "reflow_out_of_flow")} ${helpers.predefined_type("rotate", "Rotate", "generics::transform::Rotate::None", animation_value_type="ComputedValue", boxed=True, flags="CREATES_STACKING_CONTEXT FIXPOS_CB", gecko_pref="layout.css.individual-transform.enabled", spec="https://drafts.csswg.org/css-transforms-2/#individual-transforms", servo_restyle_damage = "reflow_out_of_flow")} ${helpers.predefined_type("scale", "Scale", "generics::transform::Scale::None", animation_value_type="ComputedValue", boxed=True, flags="CREATES_STACKING_CONTEXT FIXPOS_CB", gecko_pref="layout.css.individual-transform.enabled", 
spec="https://drafts.csswg.org/css-transforms-2/#individual-transforms", servo_restyle_damage = "reflow_out_of_flow")} ${helpers.predefined_type("translate", "Translate", "generics::transform::Translate::None", animation_value_type="ComputedValue", boxed=True, flags="CREATES_STACKING_CONTEXT FIXPOS_CB", gecko_pref="layout.css.individual-transform.enabled", spec="https://drafts.csswg.org/css-transforms-2/#individual-transforms", servo_restyle_damage = "reflow_out_of_flow")} // CSSOM View Module // https://www.w3.org/TR/cssom-view-1/ ${helpers.single_keyword("scroll-behavior", "auto smooth", gecko_pref="layout.css.scroll-behavior.property-enabled", products="gecko", spec="https://drafts.csswg.org/cssom-view/#propdef-scroll-behavior", animation_value_type="discrete")} % for axis in ["x", "y"]: ${helpers.predefined_type( "scroll-snap-type-" + axis, "ScrollSnapType", "computed::ScrollSnapType::None", products="gecko", needs_context=False, gecko_pref="layout.css.scroll-snap.enabled", spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/scroll-snap-type-x)", animation_value_type="discrete" )} % endfor % for axis in ["x", "y"]: ${helpers.predefined_type( "overscroll-behavior-" + axis, "OverscrollBehavior", "computed::OverscrollBehavior::Auto", products="gecko", needs_context=False, gecko_pref="layout.css.overscroll-behavior.enabled", spec="https://wicg.github.io/overscroll-behavior/#overscroll-behavior-properties", animation_value_type="discrete" )} % endfor // Compositing and Blending Level 1 // http://www.w3.org/TR/compositing-1/ ${helpers.single_keyword("isolation", "auto isolate", products="gecko", gecko_pref="layout.css.isolation.enabled", spec="https://drafts.fxtf.org/compositing/#isolation", flags="CREATES_STACKING_CONTEXT", animation_value_type="discrete")} // TODO add support for logical values recto and verso ${helpers.single_keyword("page-break-after", "auto always avoid left right", products="gecko", spec="https://drafts.csswg.org/css2/page.html#propdef-page-break-after", animation_value_type="discrete")} ${helpers.single_keyword("page-break-before", "auto always avoid left right", products="gecko", spec="https://drafts.csswg.org/css2/page.html#propdef-page-break-before", animation_value_type="discrete")} ${helpers.single_keyword("page-break-inside", "auto avoid", products="gecko", gecko_ffi_name="mBreakInside", gecko_constant_prefix="NS_STYLE_PAGE_BREAK", spec="https://drafts.csswg.org/css2/page.html#propdef-page-break-inside", animation_value_type="discrete")} // CSS Basic User Interface Module Level 3 // http://dev.w3.org/csswg/css-ui // FIXME support logical values `block` and `inline` (https://drafts.csswg.org/css-logical-props/#resize) // // This is APPLIES_TO_PLACEHOLDER so we can override, in the UA sheet, the // 'resize' property we'd inherit from textarea otherwise. Basically, just // makes the UA rules easier to write. 
${helpers.single_keyword("resize", "none both horizontal vertical", products="gecko", spec="https://drafts.csswg.org/css-ui/#propdef-resize", flags="APPLIES_TO_PLACEHOLDER", animation_value_type="discrete")} ${helpers.predefined_type( "perspective", "Perspective", "computed::Perspective::none()", gecko_ffi_name="mChildPerspective", spec="https://drafts.csswg.org/css-transforms/#perspective", extra_prefixes=transform_extra_prefixes, flags="CREATES_STACKING_CONTEXT FIXPOS_CB", animation_value_type="AnimatedPerspective", servo_restyle_damage = "reflow_out_of_flow", )} ${helpers.predefined_type("perspective-origin", "position::Position", "computed::position::Position::center()", boxed=True, extra_prefixes=transform_extra_prefixes, spec="https://drafts.csswg.org/css-transforms-2/#perspective-origin-property", animation_value_type="ComputedValue", servo_restyle_damage = "reflow_out_of_flow")} ${helpers.single_keyword("backface-visibility", "visible hidden", spec="https://drafts.csswg.org/css-transforms/#backface-visibility-property", extra_prefixes=transform_extra_prefixes, animation_value_type="discrete")} ${helpers.single_keyword("transform-box", "border-box fill-box view-box", gecko_enum_prefix="StyleGeometryBox", products="gecko", gecko_pref="svg.transform-box.enabled", spec="https://drafts.csswg.org/css-transforms/#transform-box", gecko_inexhaustive="True", animation_value_type="discrete")} ${helpers.predefined_type( "transform-style", "TransformStyle", "computed::TransformStyle::" + ("Auto" if product == "servo" else "Flat"), spec="https://drafts.csswg.org/css-transforms-2/#transform-style-property", needs_context=False, extra_prefixes=transform_extra_prefixes, flags="CREATES_STACKING_CONTEXT FIXPOS_CB", animation_value_type="discrete", servo_restyle_damage = "reflow_out_of_flow", )} ${helpers.predefined_type("transform-origin", "TransformOrigin", "computed::TransformOrigin::initial_value()", animation_value_type="ComputedValue", extra_prefixes=transform_extra_prefixes, gecko_ffi_name="mTransformOrigin", boxed=True, spec="https://drafts.csswg.org/css-transforms/#transform-origin-property", servo_restyle_damage = "reflow_out_of_flow")} ${helpers.predefined_type("contain", "Contain", "specified::Contain::empty()", animation_value_type="discrete", products="gecko", flags="FIXPOS_CB", gecko_pref="layout.css.contain.enabled", spec="https://drafts.csswg.org/css-contain/#contain-property")} // Non-standard ${helpers.single_keyword("-moz-appearance", """none button button-arrow-down button-arrow-next button-arrow-previous button-arrow-up button-bevel button-focus caret checkbox checkbox-container checkbox-label checkmenuitem dialog dualbutton groupbox inner-spin-button listbox listitem menuarrow menubar menucheckbox menuimage menuitem menuitemtext menulist menulist-button menulist-text menulist-textfield menupopup menuradio menuseparator meterbar meterchunk number-input progressbar progressbar-vertical progresschunk progresschunk-vertical radio radio-container radio-label radiomenuitem range range-thumb resizer resizerpanel scale-horizontal scalethumbend scalethumb-horizontal scalethumbstart scalethumbtick scalethumb-vertical scale-vertical scrollbar scrollbar-horizontal scrollbar-small scrollbar-vertical scrollbarbutton-down scrollbarbutton-left scrollbarbutton-right scrollbarbutton-up scrollbarthumb-horizontal scrollbarthumb-vertical scrollbartrack-horizontal scrollbartrack-vertical searchfield separator spinner spinner-downbutton spinner-textfield spinner-upbutton splitter statusbar 
statusbarpanel tab tabpanel tabpanels tab-scroll-arrow-back tab-scroll-arrow-forward textfield textfield-multiline toolbar toolbarbutton toolbarbutton-dropdown toolbargripper toolbox tooltip treeheader treeheadercell treeheadersortarrow treeitem treeline treetwisty treetwistyopen treeview window -moz-gtk-info-bar -moz-mac-active-source-list-selection -moz-mac-disclosure-button-closed -moz-mac-disclosure-button-open -moz-mac-fullscreen-button -moz-mac-help-button -moz-mac-source-list -moz-mac-source-list-selection -moz-mac-vibrancy-dark -moz-mac-vibrancy-light -moz-mac-vibrant-titlebar-light -moz-mac-vibrant-titlebar-dark -moz-win-borderless-glass -moz-win-browsertabbar-toolbox -moz-win-communications-toolbox -moz-win-exclude-glass -moz-win-glass -moz-win-media-toolbox -moz-window-button-box -moz-window-button-box-maximized -moz-window-button-close -moz-window-button-maximize -moz-window-button-minimize -moz-window-button-restore -moz-window-frame-bottom -moz-window-frame-left -moz-window-frame-right -moz-window-titlebar -moz-window-titlebar-maximized """, gecko_ffi_name="mAppearance", gecko_constant_prefix="ThemeWidgetType_NS_THEME", products="gecko", spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/-moz-appearance)", animation_value_type="discrete")} ${helpers.predefined_type("-moz-binding", "url::UrlOrNone", "computed::url::UrlOrNone::none()", products="gecko", animation_value_type="none", gecko_ffi_name="mBinding", spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/-moz-binding)")} ${helpers.single_keyword("-moz-orient", "inline block horizontal vertical", products="gecko", gecko_ffi_name="mOrient", gecko_enum_prefix="StyleOrient", spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/-moz-orient)", animation_value_type="discrete")} ${helpers.predefined_type( "will-change", "WillChange", "computed::WillChange::auto()", products="gecko", animation_value_type="discrete", spec="https://drafts.csswg.org/css-will-change/#will-change" )} ${helpers.predefined_type( "shape-image-threshold", "Opacity", "0.0", products="gecko", gecko_pref="layout.css.shape-outside.enabled", animation_value_type="ComputedValue", spec="https://drafts.csswg.org/css-shapes/#shape-image-threshold-property", )} ${helpers.predefined_type( "shape-outside", "basic_shape::FloatAreaShape", "generics::basic_shape::ShapeSource::None", products="gecko", boxed=True, gecko_pref="layout.css.shape-outside.enabled", animation_value_type="ComputedValue", flags="APPLIES_TO_FIRST_LETTER", spec="https://drafts.csswg.org/css-shapes/#shape-outside-property", )} ${helpers.predefined_type( "touch-action", "TouchAction", "computed::TouchAction::auto()", products="gecko", gecko_pref="layout.css.touch_action.enabled", animation_value_type="discrete", spec="https://compat.spec.whatwg.org/#touch-action", )}<|fim▁end|>
"computed::Time::zero()",
<|file_name|>loader.py<|end_file_name|><|fim▁begin|>import csv from dateutil.parser import parse from adoptarbol.tree.models import Tree def load(filename): with open(filename, encoding='utf-8') as f: reader = csv.reader(f) header = next(reader) def pos_for(field): return header.index(field) def float_or_none(string):<|fim▁hole|> for row in reader: # codigo = str(row[pos_for('codigo')]), print('Procesando ', row) tree = {'code': row[pos_for('codigo')], 'common_name': row[pos_for('especie')], 'scientific_name': row[pos_for('cientifico')], 'family': row[pos_for('familia')], 'coord_utm_e': float_or_none(row[pos_for('utm_x')].replace(',', '.')), 'coord_utm_n': float_or_none(row[pos_for('utm_y')].replace(',', '.')), 'coord_utm_zone_letter': row[pos_for('utm_zone')], 'coord_utm_zone_n': row[pos_for('utm_south')], 'coord_lat': float_or_none(row[pos_for('lat')].replace(',', '.')), 'coord_lon': float_or_none(row[pos_for('long')].replace(',', '.')), 'photo': row[pos_for('fotos')], 'diameter': row[pos_for('dia')], 'height': row[pos_for('alt')], 'circ': row[pos_for('circ')], 'base_area': float_or_none(row[pos_for('areabasal')].replace(',', '.')), 'size_class': row[pos_for('clasetamano')], 'quality': float_or_none(row[pos_for('calidad')].replace(',', '.')), 'relevance': row[pos_for('relevancia')], 'notes': row[pos_for('notas')], 'phenology': row[pos_for('fenologia')], 'observation': row[pos_for('obs')], 'surveyed_on': parse(row[pos_for('fechahora')]), } t = Tree(**tree) t.save() """ if __name__ == '__main__': app = create_app(CONFIG) manager = Manager(app) with app.app_context(): load() """<|fim▁end|>
try: return(float(string)) except ValueError: return None
<|file_name|>PacketReceiversReady.java<|end_file_name|><|fim▁begin|>package mcjty.rftools.blocks.teleporter; import mcjty.rftools.network.PacketListFromServer; import io.netty.buffer.ByteBuf; import java.util.List; public class PacketReceiversReady extends PacketListFromServer<PacketReceiversReady,TeleportDestinationClientInfo> { public PacketReceiversReady() { } public PacketReceiversReady(int x, int y, int z, String command, List<TeleportDestinationClientInfo> list) { super(x, y, z, command, list); } @Override protected TeleportDestinationClientInfo createItem(ByteBuf buf) { return new TeleportDestinationClientInfo(buf);<|fim▁hole|>}<|fim▁end|>
}
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 Kyle Mayes // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #![feature(plugin, plugin_registrar, rustc_private)] #![cfg_attr(feature="clippy", plugin(clippy))] #![cfg_attr(feature="clippy", warn(clippy))] #![allow(plugin_as_library)] #![plugin(synthax)] extern crate rustc_plugin; extern crate syntax; extern crate synthax; use syntax::print::pprust; use syntax::abi::{Abi}; use syntax::ast::{ForeignItemKind, Unsafety, Visibility}; use syntax::codemap::{ExpnInfo, MacroAttribute, NameAndSpan, DUMMY_SP}; use syntax::ext::base::{DummyResolver, ExtCtxt}; use syntax::ext::expand::{ExpansionConfig}; use syntax::parse::{ParseSess}; use syntax::parse::token::{BinOpToken, DelimToken, Lit, Token}; use syntax::symbol::{Symbol}; //================================================ // Macros //================================================ // assert_ast_eq! ________________________________ macro_rules! assert_ast_eq { ($print:ident, $left:expr, $right:expr) => (assert_eq!(pprust::$print($left), $right)); } //================================================ // Functions //================================================ fn with_context<F: Fn(&ExtCtxt)>(f: F) { let session = ParseSess::new(); let config = ExpansionConfig::default("".into()); let mut resolver = DummyResolver; let mut context = ExtCtxt::new(&session, config, &mut resolver); let info = ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: MacroAttribute(Symbol::intern("test")), allow_internal_unstable: false, span: None, }, }; context.bt_push(info); f(&context); } #[test] fn test_quote_token_trees() { with_context(|c| { let tts = quote_token_trees!(c, =); assert_eq!(tts.len(), 1); assert!(tts[0].eq_token(Token::Eq)); let tts = quote_token_trees!(c, +); assert_eq!(tts.len(), 1); assert!(tts[0].eq_token(Token::BinOp(BinOpToken::Plus))); let tts = quote_token_trees!(c, 322); assert_eq!(tts.len(), 1); let lit = Lit::Integer(Symbol::intern("322")); assert!(tts[0].eq_token(Token::Literal(lit, None))); let tts = quote_token_trees!(c, 322u32); assert_eq!(tts.len(), 1); assert!(tts[0].eq_token(Token::Literal(lit, Some(Symbol::intern("u32"))))); let tts = quote_token_trees!(c, foo); assert_eq!(tts.len(), 1); assert!(tts[0].eq_token(Token::Ident(c.ident_of("foo")))); let tts = quote_token_trees!(c, 'bar); assert_eq!(tts.len(), 1); assert!(tts[0].eq_token(Token::Lifetime(c.ident_of("'bar")))); let tts = quote_token_trees!(c, /// Documentation. 
); assert_eq!(tts.len(), 1); assert!(tts[0].eq_token(Token::DocComment(Symbol::intern("/// Documentation.")))); let tts = quote_token_trees!(c, foo + bar); assert_eq!(tts.len(), 3); assert!(tts[0].eq_token(Token::Ident(c.ident_of("foo")))); assert!(tts[1].eq_token(Token::BinOp(BinOpToken::Plus))); assert!(tts[2].eq_token(Token::Ident(c.ident_of("bar")))); let tts = quote_token_trees!(c, (foo + bar)); assert_eq!(tts.len(), 5); assert!(tts[0].eq_token(Token::OpenDelim(DelimToken::Paren))); assert!(tts[1].eq_token(Token::Ident(c.ident_of("foo")))); assert!(tts[2].eq_token(Token::BinOp(BinOpToken::Plus))); assert!(tts[3].eq_token(Token::Ident(c.ident_of("bar")))); assert!(tts[4].eq_token(Token::CloseDelim(DelimToken::Paren))); }); } #[test] fn test_quote_arg() { with_context(|c| { let arg = quote_arg!(c, foo: i32); assert_ast_eq!(arg_to_string, &arg, "foo: i32"); let tts = quote_token_trees!(c, foo: i32); let arg = quote_arg!(c, $($tts)*); assert_ast_eq!(arg_to_string, &arg, "foo: i32"); let pat = quote_pat!(c, foo); let ty = quote_ty!(c, i32); let arg = quote_arg!(c, $pat: $ty); assert_ast_eq!(arg_to_string, &arg, "foo: i32"); }); } #[test] fn test_quote_arm() { with_context(|c| { let arm = quote_arm!(c, foo if bar => { baz }); assert_ast_eq!(arm_to_string, &arm, " foo if bar => { baz }"); let pat = quote_pat!(c, foo); let guard = quote_expr!(c, bar); let body = quote_expr!(c, { baz }); let arm = quote_arm!(c, $pat if $guard => $body); assert_ast_eq!(arm_to_string, &arm, " foo if bar => { baz }"); }); } #[test] fn test_quote_attribute() { with_context(|c| { let attribute = quote_attribute!(c, #![feature(quote)]); assert_ast_eq!(attribute_to_string, &attribute, "#![feature(quote)]"); let meta = quote_meta_item!(c, feature(quote)); let attribute = quote_attribute!(c, #![$meta]); assert_ast_eq!(attribute_to_string, &attribute, "#![feature(quote)]"); }); } #[test] fn test_quote_bare_fn_ty() { with_context(|c| { let ty = quote_bare_fn_ty!(c, extern "C" fn(foo: i32) -> f32); assert_eq!(ty.unsafety, Unsafety::Normal); assert_eq!(ty.abi, Abi::C); assert_eq!(ty.lifetimes.len(), 0); assert_ast_eq!(fn_block_to_string, &ty.decl, "|foo: i32| -> f32"); let decl = quote_fn_decl!(c, (foo: i32) -> f32); let ty = quote_bare_fn_ty!(c, extern "C" fn $decl); assert_eq!(ty.unsafety, Unsafety::Normal); assert_eq!(ty.abi, Abi::C); assert_eq!(ty.lifetimes.len(), 0); assert_ast_eq!(fn_block_to_string, &ty.decl, "|foo: i32| -> f32"); }); } #[test] fn test_quote_block() { with_context(|c| { let block = quote_block!(c, { let foo = bar; }); assert_ast_eq!(block_to_string, &block, "{ let foo = bar; }"); let stmt = quote_stmt!(c, let foo = bar;).unwrap(); let block = quote_block!(c, { $stmt }); assert_ast_eq!(block_to_string, &block, "{ let foo = bar; }"); let expr = quote_expr!(c, foo); let block = quote_block!(c, { $stmt $expr }); assert_ast_eq!(block_to_string, &block, "{ let foo = bar; foo }"); }); } #[test] fn test_quote_expr() { with_context(|c| { let expr = quote_expr!(c, 17 + 322); assert_ast_eq!(expr_to_string, &expr, "17 + 322"); let left = quote_lit!(c, 17); let op = Token::BinOp(BinOpToken::Plus); let right = quote_lit!(c, 322); let expr = quote_expr!(c, $left $op $right); assert_ast_eq!(expr_to_string, &expr, "17 + 322"); let exprs = vec![left, right]; let expr = quote_expr!(c, $($exprs) +*); assert_ast_eq!(expr_to_string, &expr, "17 + 322"); let left = quote_field!(c, a: 322); let right = quote_field!(c, b: 17); let expr = quote_expr!(c, Struct { $left, $right }); assert_ast_eq!(expr_to_string, &expr, 
"Struct{a: 322, b: 17,}"); let fields = vec![left, right]; let expr = quote_expr!(c, Struct { ${fields[0]}, ${fields[1]} }); assert_ast_eq!(expr_to_string, &expr, "Struct{a: 322, b: 17,}"); let expr = quote_expr!(c, Struct { $($fields), * }); assert_ast_eq!(expr_to_string, &expr, "Struct{a: 322, b: 17,}"); let expr = quote_expr!(c, Struct { $(${&fields[..1]}), * }); assert_ast_eq!(expr_to_string, &expr, "Struct{a: 322,}"); let idents = vec![c.ident_of("foo"), c.ident_of("baz")]; let ident = c.ident_of("bar"); let expr = quote_expr!(c, $($idents + ($ident * 2)) +*); assert_ast_eq!(expr_to_string, &expr, "foo + (bar * 2) + baz + (bar * 2)");<|fim▁hole|>#[test] fn test_quote_field() { with_context(|c| { let field = quote_field!(c, foo: 322); assert_eq!(field.ident.node.name, "foo"); assert_ast_eq!(expr_to_string, &field.expr, "322"); let ident = c.ident_of("foo"); let expr = quote_expr!(c, 322); let field = quote_field!(c, $ident: $expr); assert_eq!(field.ident.node.name, "foo"); assert_ast_eq!(expr_to_string, &field.expr, "322"); }); } #[test] fn test_quote_field_pat() { with_context(|c| { let pat = quote_field_pat!(c, ref mut foo); assert_eq!(pat.ident.name, "foo"); assert_ast_eq!(pat_to_string, &pat.pat, "ref mut foo"); assert!(pat.is_shorthand); let pat = quote_field_pat!(c, foo: ref mut foo); assert_eq!(pat.ident.name, "foo"); assert_ast_eq!(pat_to_string, &pat.pat, "ref mut foo"); assert!(!pat.is_shorthand); }); } #[test] fn test_quote_fn_decl() { with_context(|c| { let decl = quote_fn_decl!(c, (foo: i32) -> f32); assert_ast_eq!(fn_block_to_string, &decl, "|foo: i32| -> f32"); let arg = quote_arg!(c, foo: i32); let ty = quote_ty!(c, f32); let decl = quote_fn_decl!(c, ($arg) -> $ty); assert_ast_eq!(fn_block_to_string, &decl, "|foo: i32| -> f32"); let idents = vec![c.ident_of("foo"), c.ident_of("bar")]; let ty1 = quote_ty!(c, i32); let ty2 = quote_ty!(c, i64); let tys = vec![ty1, ty2]; let decl = quote_fn_decl!(c, ($($idents: $tys), *) -> $ty); assert_ast_eq!(fn_block_to_string, &decl, "|foo: i32, bar: i64| -> f32"); let ty = quote_ty!(c, i32); let tys = vec![ty]; let decl = quote_fn_decl!(c, ($($idents: $($tys), *), *) -> f32); assert_ast_eq!(fn_block_to_string, &decl, "|foo: i32, bar: i32| -> f32"); let arg1 = quote_arg!(c, foo: i32); let arg2 = quote_arg!(c, bar: i64); let args = vec![arg1, arg2]; let decl = quote_fn_decl!(c, ($($args), *) -> f32); assert_ast_eq!(fn_block_to_string, &decl, "|foo: i32, bar: i64| -> f32"); }); } #[test] fn test_quote_foreign_item() { with_context(|c| { let item = quote_foreign_item!(c, fn foo(bar: i32, ...) -> f32;); assert_eq!(item.ident.name, "foo"); match item.node { ForeignItemKind::Fn(ref decl, ref generics) => { assert!(decl.variadic); assert_ast_eq!(fn_block_to_string, decl, "|bar: i32| -> f32"); assert_ast_eq!(generics_to_string, generics, ""); }, _ => panic!("expected foreign fn"), } let item = quote_foreign_item!(c, static mut foo: i32;); assert_eq!(item.ident.name, "foo"); match item.node { ForeignItemKind::Static(ref ty, ref mutable) => { assert!(mutable); assert_ast_eq!(ty_to_string, ty, "i32"); }, _ => panic!("expected foreign static"), } }); } #[test] fn test_quote_foreign_mod() { with_context(|c| { let item = quote_foreign_mod!(c, extern { fn foo(bar: i32, ...) 
-> f32; }); assert_eq!(item.items.len(), 1); assert_eq!(item.items[0].ident.name, "foo"); match item.items[0].node { ForeignItemKind::Fn(ref decl, ref generics) => { assert!(decl.variadic); assert_ast_eq!(fn_block_to_string, decl, "|bar: i32| -> f32"); assert_ast_eq!(generics_to_string, generics, ""); }, _ => panic!("expected foreign fn"), } let item = quote_foreign_mod!(c, extern { static mut foo: i32; }); assert_eq!(item.items.len(), 1); assert_eq!(item.items[0].ident.name, "foo"); match item.items[0].node { ForeignItemKind::Static(ref ty, ref mutable) => { assert!(mutable); assert_ast_eq!(ty_to_string, ty, "i32"); }, _ => panic!("expected foreign static"), } }); } #[test] fn test_quote_generics() { with_context(|c| { let generics = quote_generics!(c, <'foo, Bar: Baz>); assert_ast_eq!(generics_to_string, &generics, "<'foo, Bar: Baz>"); let lftm = quote_lifetime!(c, 'foo); let ident = c.ident_of("Bar"); let bound = c.ident_of("Baz"); let generics = quote_generics!(c, <$lftm, $ident: $bound>); assert_ast_eq!(generics_to_string, &generics, "<'foo, Bar: Baz>"); }); } #[test] fn test_quote_impl_item() { with_context(|c| { let item = quote_impl_item!(c, fn foo(&self, bar: i32) { }); assert_ast_eq!(impl_item_to_string, &item, "fn foo(&self, bar: i32) { }"); let ident = c.ident_of("foo"); let arg = quote_arg!(c, bar: i32); let block = quote_block!(c, { }); let item = quote_impl_item!(c, fn $ident(&self, $arg) $block); assert_ast_eq!(impl_item_to_string, &item, "fn foo(&self, bar: i32) { }"); }); } #[test] fn test_quote_item() { with_context(|c| { let item = quote_item!(c, struct Foo { bar: i32 }).unwrap(); assert_ast_eq!(item_to_string, &item, "struct Foo {\n bar: i32,\n}"); let ident = c.ident_of("Foo"); let item = quote_item!(c, struct $ident { bar: i32 }).unwrap(); assert_ast_eq!(item_to_string, &item, "struct Foo {\n bar: i32,\n}"); let ident = c.ident_of("foo"); let decl = quote_fn_decl!(c, (bar: i32)); let item = quote_item!(c, fn $ident $decl { }).unwrap(); assert_ast_eq!(item_to_string, &item, "fn foo(bar: i32) { }"); let decl = quote_fn_decl!(c, (bar: i32) -> f32); let item = quote_item!(c, fn $ident $decl { }).unwrap(); assert_ast_eq!(item_to_string, &item, "fn foo(bar: i32) -> f32 { }"); let decl = quote_fn_decl!(c, (bar: i32) -> !); let item = quote_item!(c, fn $ident $decl { }).unwrap(); assert_ast_eq!(item_to_string, &item, "fn foo(bar: i32) -> ! { }"); let item = quote_foreign_item!(c, fn foo<T>(bar: i32, ...) -> f32;); let item = quote_item!(c, extern { $item }).unwrap(); assert_ast_eq!(item_to_string, &item, "extern \"C\" {\n fn foo<T>(bar: i32, ...) 
-> f32;\n}"); let item = quote_foreign_item!(c, static mut foo: i32;); let item = quote_item!(c, extern { $item }).unwrap(); assert_ast_eq!(item_to_string, &item, "extern \"C\" {\n static mut foo: i32;\n}"); let item = quote_foreign_mod!(c, extern { static mut foo: i32; }); let item = quote_item!(c, $item).unwrap(); assert_ast_eq!(item_to_string, &item, "extern \"C\" {\n static mut foo: i32;\n}"); let field1 = quote_struct_field!(c, #[foo] foo: i32); let field2 = quote_struct_field!(c, pub bar: f32); let fields = vec![field1, field2]; let item = quote_item!(c, struct Struct { $($fields), * }).unwrap(); assert_ast_eq!(item_to_string, &item, r#"struct Struct { #[foo] foo: i32, pub bar: f32, }"#); let variant1 = quote_variant!(c, #[foo] Foo); let variant2 = quote_variant!(c, Bar(i32)); let variant3 = quote_variant!(c, #[baz] Baz { #[baz] baz: i32 }); let variants = vec![variant1, variant2, variant3]; let item = quote_item!(c, enum Enum { $($variants), * }).unwrap(); assert_ast_eq!(item_to_string, &item, r#"enum Enum { #[foo] Foo, Bar(i32), #[baz] Baz { #[baz] baz: i32, }, }"#); }); } #[test] fn test_quote_lifetime() { with_context(|c| { let lftm = quote_lifetime!(c, 'foo); assert_ast_eq!(lifetime_to_string, &lftm, "'foo"); }); } #[test] fn test_quote_lit() { with_context(|c| { let lit = quote_lit!(c, 322u32); assert_ast_eq!(lit_to_string, &lit, "322u32"); }); } #[test] fn test_quote_local() { with_context(|c| { let local = quote_local!(c, let foo: i32 = baz); assert_ast_eq!(pat_to_string, &local.pat, "foo"); assert_ast_eq!(ty_to_string, local.ty.as_ref().unwrap(), "i32"); assert_ast_eq!(expr_to_string, local.init.as_ref().unwrap(), "baz"); }); } #[test] fn test_quote_meta_item() { with_context(|c| { let meta = quote_meta_item!(c, foo(bar, baz)); assert_ast_eq!(meta_item_to_string, &meta, "foo(bar, baz)"); let ident = c.ident_of("foo"); let left = quote_meta_item!(c, bar); let right = quote_meta_item!(c, baz); let meta = quote_meta_item!(c, $ident($left, $right)); assert_ast_eq!(meta_item_to_string, &meta, "foo(bar, baz)"); let ident = c.ident_of("foo"); let meta1 = quote_meta_item!(c, bar); let meta2 = quote_meta_item!(c, baz); let metas = vec![meta1, meta2]; let meta = quote_meta_item!(c, $ident($($metas), *)); assert_ast_eq!(meta_item_to_string, &meta, "foo(bar, baz)"); }); } #[test] fn test_quote_pat() { with_context(|c| { let pat = quote_pat!(c, (foo, bar)); assert_ast_eq!(pat_to_string, &pat, "(foo, bar)"); let left = c.ident_of("foo"); let right = c.ident_of("bar"); let pat = quote_pat!(c, ($left, $right)); assert_ast_eq!(pat_to_string, &pat, "(foo, bar)"); let idents = vec![left, right]; let pat = quote_pat!(c, ($(ref $idents), *)); assert_ast_eq!(pat_to_string, &pat, "(ref foo, ref bar)"); let pat = quote_field_pat!(c, ref mut bar); let pat = quote_pat!(c, Foo { $pat, baz: ref baz }); assert_ast_eq!(pat_to_string, &pat, "Foo { bar: ref mut bar, baz: ref baz }"); }); } #[test] fn test_quote_path() { with_context(|c| { let path = quote_path!(c, ::foo::bar<'a>::baz<T>); assert_ast_eq!(path_to_string, &path, "::foo::bar<'a>::baz<T>"); let one = c.ident_of("foo"); let two = c.ident_of("bar"); let three = c.ident_of("baz"); let path = quote_path!(c, ::$one::$two<'a>::$three<T>); assert_ast_eq!(path_to_string, &path, "::foo::bar<'a>::baz<T>"); }); } #[test] fn test_quote_stmt() { with_context(|c| { let stmt = quote_stmt!(c, let foo = bar;).unwrap(); assert_ast_eq!(stmt_to_string, &stmt, "let foo = bar;"); let ident = c.ident_of("foo"); let ty = quote_ty!(c, i32); let expr = quote_expr!(c, 
bar); let stmt = quote_stmt!(c, let $ident: $ty = $expr;).unwrap(); assert_ast_eq!(stmt_to_string, &stmt, "let foo: i32 = bar;"); let local = quote_local!(c, let foo: i32 = bar); let stmt = quote_stmt!(c, $local;).unwrap(); assert_ast_eq!(stmt_to_string, &stmt, "let foo: i32 = bar;"); }); } #[test] fn test_quote_struct_field() { with_context(|c| { let field = quote_struct_field!(c, pub foo: i32); assert_eq!(field.vis, Visibility::Public); assert_eq!(field.ident.unwrap().name, "foo"); assert_ast_eq!(ty_to_string, &field.ty, "i32"); let ident = c.ident_of("foo"); let ty = quote_ty!(c, i32); let field = quote_struct_field!(c, pub $ident: $ty); assert_eq!(field.vis, Visibility::Public); assert_eq!(field.ident.unwrap().name, "foo"); assert_ast_eq!(ty_to_string, &field.ty, "i32"); }); } #[test] fn test_quote_trait_item() { with_context(|c| { let item = quote_trait_item!(c, fn foo(&self, bar: i32) { }); assert_ast_eq!(trait_item_to_string, &item, "fn foo(&self, bar: i32) { }"); let ident = c.ident_of("foo"); let arg = quote_arg!(c, bar: i32); let item = quote_trait_item!(c, fn $ident(&self, $arg) { }); assert_ast_eq!(trait_item_to_string, &item, "fn foo(&self, bar: i32) { }"); }); } #[test] fn test_quote_ty() { with_context(|c| { let ty = quote_ty!(c, i32); assert_ast_eq!(ty_to_string, &ty, "i32"); let ty = quote_bare_fn_ty!(c, extern "C" fn(foo: i32) -> f32); let ty = quote_ty!(c, $ty); assert_ast_eq!(ty_to_string, &ty, r#"extern "C" fn(foo: i32) -> f32"#); }); } #[test] fn test_quote_variant() { with_context(|c| { let variant = quote_variant!(c, Foo); assert_ast_eq!(variant_to_string, &variant, "Foo"); let variant = quote_variant!(c, Foo = 322); assert_ast_eq!(variant_to_string, &variant, "Foo = 322"); let variant = quote_variant!(c, Foo(i32)); assert_ast_eq!(variant_to_string, &variant, "Foo(i32)"); let variant = quote_variant!(c, Foo { bar: i32 }); assert_ast_eq!(variant_to_string, &variant, "Foo {\n bar: i32,\n}"); }); } #[test] fn test_quote_where_clause() { with_context(|c| { let clause = quote_where_clause!(c, where Foo: Bar + 'static); assert_ast_eq!(where_clause_to_string, &clause, " where Foo: Bar + 'static"); let ident = c.ident_of("Foo"); let bound = c.ident_of("Bar"); let lftm = quote_lifetime!(c, 'static); let clause = quote_where_clause!(c, where $ident: $bound + $lftm); assert_ast_eq!(where_clause_to_string, &clause, " where Foo: Bar + 'static"); }); }<|fim▁end|>
}); }
<|file_name|>test_graph.py<|end_file_name|><|fim▁begin|>''' Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' __author__ = 'Marko A. Rodriguez (http://markorodriguez.com)' import unittest from unittest import TestCase import six from gremlin_python.statics import long from gremlin_python.structure.graph import Edge from gremlin_python.structure.graph import Property from gremlin_python.structure.graph import Vertex from gremlin_python.structure.graph import VertexProperty from gremlin_python.structure.graph import Path class TestGraph(TestCase): def test_graph_objects(self): vertex = Vertex(1) assert "v[1]" == str(vertex) assert "vertex" == vertex.label assert "person" == Vertex(1, "person").label assert vertex == Vertex(1) # edge = Edge(2, Vertex(1), "said", Vertex("hello", "phrase")) assert "e[2][1-said->hello]" == str(edge) assert Vertex(1) == edge.outV assert Vertex("hello") == edge.inV assert "said" == edge.label<|fim▁hole|> assert "vp[name->marko]" == str(vertex_property) assert "name" == vertex_property.label assert "name" == vertex_property.key assert "marko" == vertex_property.value assert long(24) == vertex_property.id assert isinstance(vertex_property.id, long) assert vertex_property == VertexProperty(long(24), "name", "marko") # property = Property("age", 29) assert "p[age->29]" == str(property) assert "age" == property.key assert 29 == property.value assert isinstance(property.value, int) assert property == Property("age", 29) if not six.PY3: assert property != Property("age", long(29)) # for i in [vertex, edge, vertex_property, property]: for j in [vertex, edge, vertex_property, property]: if type(i) != type(j): assert i != j else: assert i == j assert i.__hash__() == hash(i) def test_path(self): path = Path([set(["a", "b"]), set(["c", "b"]), set([])], [1, Vertex(1), "hello"]) assert "[1, v[1], 'hello']" == str(path) assert 1 == path["a"] assert Vertex(1) == path["c"] assert [1, Vertex(1)] == path["b"] assert path[0] == 1 assert path[1] == Vertex(1) assert path[2] == "hello" assert 3 == len(path) assert "hello" in path assert "goodbye" not in path assert Vertex(1) in path assert Vertex(123) not in path # try: temp = path[3] raise Exception("Accessing beyond the list index should throw an index error") except IndexError: pass # try: temp = path["zz"] raise Exception("Accessing nothing should throw a key error") except KeyError: pass # try: temp = path[1:2] raise Exception("Accessing using slices should throw a type error") except TypeError: pass # assert path == path assert hash(path) == hash(path) path2 = Path([set(["a", "b"]), set(["c", "b"]), set([])], [1, Vertex(1), "hello"]) assert path == path2 assert hash(path) == hash(path2) assert path != Path([set(["a"]), set(["c", "b"]), set([])], [1, Vertex(1), "hello"]) assert path != Path([set(["a", "b"]), set(["c", "b"]), set([])], [3, 
Vertex(1), "hello"]) if __name__ == '__main__': unittest.main()<|fim▁end|>
assert "phrase" == edge.inV.label assert edge.inV != edge.outV # vertex_property = VertexProperty(long(24), "name", "marko")
<|file_name|>martex_tr.ts<|end_file_name|><|fim▁begin|><TS language="tr" version="2.1"> <context> <name>AddNewAddressDialog</name> <message> <source>Dialog</source> <translation>Diyalog</translation> </message> <message> <source>My Address</source> <translation>Adresim</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> </context> <context> <name>AddNewContactDialog</name> <message> <source>Dialog</source> <translation>Diyalog</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>Edit Contact</source> <translation>Kişiyi Güncelle</translation> </message> <message> <source>Set a label for the selected address</source> <translation>Seçili adres için etiket belirleyin</translation> </message> <message> <source>Enter a name for the address (e.g Exchange)</source> <translation>Adres için isim girin (örn. Borsa)</translation> </message> <message> <source>SAVE</source> <translation>KAYDET</translation> </message> <message> <source>Edit label for the selected address: %1</source> <translation>Seçili adres için etiketi güncelleyin %1</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <source>Right-click to edit address or label</source> <translation>Adresi ya da etiketi değiştirmek için sağ tuşa tıklayın</translation> </message> <message> <source>Create a new address</source> <translation>Yeni bir adres oluştur</translation> </message> <message> <source>&amp;New</source> <translation>&amp;Yeni</translation> </message> <message> <source>Copy the currently selected address to the system clipboard</source> <translation>Seçili olan adresi sistem panosuna kopyala</translation> </message> <message> <source>&amp;Copy</source> <translation>&amp;Kopyala</translation> </message> <message> <source>Delete the currently selected address from the list</source> <translation>Seçili olan adresi listeden sil</translation> </message> <message> <source>&amp;Delete</source> <translation>&amp;Sil</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Güncel sekmedeki verileri bir dosyaya aktar</translation> </message> <message> <source>&amp;Export</source> <translation>Çıkart</translation> </message> <message> <source>C&amp;lose</source> <translation>Kapat</translation> </message> <message> <source>Choose the address to send coins to</source> <translation>Coinleri gönderecek adresi seçin</translation> </message> <message> <source>Choose the address to receive coins with</source> <translation>Coinleri Alacak adresi seçin</translation> </message> <message> <source>C&amp;hoose</source> <translation>Seçin</translation> </message> <message> <source>Sending addresses</source> <translation>Gönderen adres</translation> </message> <message> <source>Receiving addresses</source> <translation>Alıcı Adres</translation> </message> <message> <source>These are your PIVX addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation>Bunlar ödemeleri göndermek için olan PIVX adreslerinizdir.Coinleri göndermeden önce her zaman gönderilen miktarı ve adresi kontrol edin</translation> </message> <message> <source>These are your PIVX addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source> <translation>Bunlar ödemeleri almak için olan PIVX adreslerinizdirHer işlem için yeni bir adres kullanmanız önerilir.</translation> </message> <message> <source>&amp;Copy Address</source> <translation>Adresi kopyala</translation> </message> <message> <source>Copy &amp;Label</source> <translation>Kopyala &amp;Etiket</translation> </message> <message> <source>&amp;Edit</source> <translation>&amp;Düzelt</translation> </message> <message> <source>Export Address List</source> <translation>Adres listesini çıkart</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Virgül ile ayrılmış dosya(*.csv)</translation> </message> <message> <source>Exporting Failed</source> <translation>Çıkartma işlemi başarısız</translation> </message> <message> <source>There was an error trying to save the address list to %1. Please try again.</source> <translation>Adres listesini %1 e kaydederken hata oldu.Lütfen tekrar deneyin</translation> </message> </context> <context> <name>AddressLabelRow</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Bob Allen</source> <translation>Bob Allen</translation> </message> <message> <source>DN6i46dytMPVhV1JMGZFuQBh7BZZ6nNLox</source> <translation>DN6i46dytMPVhV1JMGZFuQBh7BZZ6nNLox</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <source>Label</source> <translation>Etiket</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message> <source>Date</source> <translation>Tarih</translation> </message> <message> <source>(no label)</source> <translation>(etiket yok)</translation> </message> </context> <context> <name>AddressesWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Send</source> <translation>Gönder</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>No active Master Node yet</source> <translation>Henüz aktif Masternode bulunmuyor</translation> </message> <message> <source>Contact name</source> <translation>Kişi İsmi</translation> </message> <message> <source>Enter address</source> <translation>Adres girin</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>Contacts</source> <translation>Kişiler</translation> </message> <message> <source>You can add a new one in the options menu to the side.</source> <translation>Seçenekler menüsünde yan tarafa yeni bir tane ekleyebilirsiniz.</translation> </message> <message> <source>No contacts yet</source> <translation>Girilmiş bağlantı yok</translation> </message> <message> <source>e.g. John Doe</source> <translation>örn. 
Ad Soyad</translation> </message> <message> <source>Enter a PIVX address</source> <translation>Bir PIVX adresi girin</translation> </message> <message> <source>SAVE</source> <translation>KAYDET</translation> </message> <message> <source>Invalid Contact Address</source> <translation>Geçersiz Bağlantı Adresi</translation> </message> <message> <source>Cannot store your own address as contact</source> <translation>Kendi adresinizi bağlantı adresi olarak kaydedemezsiniz.</translation> </message> <message> <source>Address already stored, label: %1</source> <translation>Adres önceden kaydedilmiş, etiket: %1</translation> </message> <message> <source>New Contact Stored</source> <translation>Yeni Bağlantı Kaydedildi</translation> </message> <message> <source>Error Storing Contact</source> <translation>Bağlantı Kayıt Hatası</translation> </message> <message> <source>Contact edited</source> <translation>Bağlantı güncellendi</translation> </message> <message> <source>Contact edit failed</source> <translation>Bağlantı güncelleme hatası</translation> </message> <message> <source>Delete Contact</source> <translation>Bağlantıyı Sil</translation> </message> <message> <source>You are just about to remove the contact: %1 Are you sure?</source> <translation>Bağlantıyı silmek üzeresiniz: %1 Emin misiniz?</translation> </message> <message> <source>Contact Deleted</source> <translation>Bağlantı Silindi</translation> </message> <message> <source>Error deleting a contact</source> <translation>Bağlantıyı silme hatası</translation> </message> <message> <source>Address copied</source> <translation>Adres kopyalandı</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <source>Passphrase Dialog</source> <translation>Şifre sözcüğü Diyaloğu</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;ten or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Yeni şifre sözcüğünü cüzdana girinLütfen bir şifre sözcüğü kullanın &lt;br/&gt;&lt;b&gt;10 veya daha fazla karakter içeren&lt;/b&gt;veya&lt;b&gt;8 veya daha fazla kelime içeren&lt;/b&gt;</translation> </message> <message> <source>Encrypt wallet</source> <translation>Cüzdanı şifreleyin</translation> </message> <message> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Bu işlem, cüzdan kilidini açmak için cüzdan parolanıza ihtiyaç duyuyor.</translation> </message> <message> <source>Unlock wallet for staking</source> <translation>Cüzdanın kilidini Stake için kaldırın</translation> </message> <message> <source>Unlock wallet</source> <translation>Cüzdanı aç</translation> </message> <message> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Bu işlem, cüzdan şifresini çözmek için cüzdan parolanıza ihtiyaç duyuyor.</translation> </message> <message> <source>Decrypt wallet</source> <translation>cüzdan şifresini çöz</translation> </message> <message> <source>Change passphrase</source> <translation>Şifreyi değiştir</translation> </message> <message> <source>Enter the old and new passphrase to the wallet.</source> <translation>Cüzdanınıza eski ve yeni parolayı girin.</translation> </message> <message> <source>Confirm wallet encryption</source> <translation>cüzdan 
şifrelemesini onayla</translation> </message> <message> <source>ENCRYPT</source> <translation>ŞİFRELE</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>PIVX will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your PIVs from being stolen by malware infecting your computer.</source> <translation>PIVX şifreleme işlemini tamamlamak için şimdi kapanacaktır. Cüzdanınızı şifrelemenin, PIV'lerinizi bilgisayarınıza bulaşan kötücül yazılımlar tarafından çalınmasına tamamen engelleyemediğini unutmayın</translation> </message> <message> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Cüzdanınızı şifrelemek istediğinizden emin misiniz?</translation> </message> <message> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR PIV&lt;/b&gt;!</source> <translation>Dikkat! Eğer cüzdanınızı şifrelerseniz ve şifrenizi unutursanız &lt;b&gt;Tüm Pıvlerinizi kaybedersiniz&lt;/b&gt;!</translation> </message> <message> <source>Wallet encrypted</source> <translation>Cüzdan şifrelendi</translation> </message> <message> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>Önemli: wallet dosyası ile daha önce yaptığınız yedeklemeler şifreleme sonrası yenisi ile tekrar yedeklenmelidir.Güvenlik nedenleri ile, şifrelenmemiş cüzdanınıza ait yedeklemeler yeni şifrelenmiş cüzdanoınızla birlikte kullanılmaz hale gelecektir.</translation> </message> <message> <source>Wallet encryption failed</source> <translation>Cüzdan şifrelemesi başarısız oldu</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Dahili bir hata nedeniyle cüzdan şifrelemesi başarısız oldu. 
Cüzdanınız şifrelenmedi.</translation> </message> <message> <source>The supplied passphrases do not match.</source> <translation>Verilen parolalar uyuşmuyor.</translation> </message> <message> <source>Wallet unlock failed</source> <translation>Cüzdan kilidini açma başarısız oldu</translation> </message> <message> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>cüzdan şifre çözme işlemi için girilen parola yanlıştı.</translation> </message> <message> <source>Wallet decryption failed</source> <translation>Cüzdan şifre çözme işlemi başarısız oldu</translation> </message> <message> <source>Wallet passphrase was successfully changed.</source> <translation>Cüzdan parolası başarıyla değiştirildi.</translation> </message> <message> <source>Warning: The Caps Lock key is on!</source> <translation>Uyarı: Caps Lock tuşu açık!</translation> </message> </context> <context> <name>BanTableModel</name> <message> <source>IP/Netmask</source> <translation>IP/Netmask</translation> </message> <message> <source>Banned Until</source> <translation>Yasaklılık Bitiş Tarihi</translation> </message> </context> <context> <name>BlockExplorer</name> <message> <source>Blockchain Explorer</source> <translation>Blok zinciri tarayıcı</translation> </message> <message> <source>Back</source> <translation>Geri</translation> </message> <message> <source>Forward</source> <translation>Ileri</translation> </message> <message> <source>Address / Block / Transaction</source> <translation>Adres/Blok/İşlem</translation> </message> <message> <source>Search</source> <translation>Ara</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>Not all transactions will be shown. To view all transactions you need to set txindex=1 in the configuration file (pivx.conf).</source> <translation>Tüm işlemler gösterilmez. Tüm işlemleri görmek için yapılandırma dosyasında txindex = 1 ayarlamanız gerekir (pivx.conf). 
</translation> </message> </context> <context> <name>CSRow</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Savings</source> <translation>Birikimler</translation> </message> <message> <source>0,00 PIV</source> <translation>0,00 PIV</translation> </message> <message> <source>address</source> <translation>adres</translation> </message> <message> <source>Not Staking</source> <translation>Stake yapılmıyor</translation> </message> <message> <source>Own delegation</source> <translation>Kendi delegasyonum</translation> </message> </context> <context> <name>ClientModel</name> <message> <source>Total: %1 (IPv4: %2 / IPv6: %3 / Tor: %4 / Unknown: %5)</source> <translation>Total: %1 (IPv4: %2 / IPv6: %3 / Tor: %4 / Bilinmeyen: %5)</translation> </message> <message> <source>Network Alert</source> <translation>Ağ Uyarısı</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <source>Quantity:</source> <translation>Miktar</translation> </message> <message> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <source>Fee:</source> <translation>Ücret:</translation> </message> <message> <source>Coin Selection</source> <translation>Koin Seçimi</translation> </message> <message> <source>Dust:</source> <translation>Dust</translation> </message> <message> <source>Change:</source> <translation>Fark:</translation> </message> <message> <source>Tree mode</source> <translation>Ağaç modu</translation> </message> <message> <source>List mode</source> <translation>Liste modu</translation> </message> <message> <source>(1 locked)</source> <translation>(1 kilitli)</translation> </message> <message> <source>Amount</source> <translation>Miktar</translation> </message> <message> <source>Transaction Details</source> <translation>İşlem Detayları</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>0.00 PIV</source> <translation>0.00 PIV</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>Select all</source> <translation>Tümünü seç</translation> </message> <message> <source>Toggle lock state</source> <translation>Kilit durumunu değiştir</translation> </message> <message> <source>Received with label</source> <translation>Etiket ile alındı</translation> </message> <message> <source>Received with address</source> <translation>Adres ile alındı</translation> </message> <message> <source>After Fee: </source> <translation>Ücret sonrası:</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>Date</source> <translation>Tarih</translation> </message> <message> <source>Confirmations</source> <translation>Doğrulamalar</translation> </message> <message> <source>Confirmed</source> <translation>Doğrulandı</translation> </message> <message> <source>Copy address</source> <translation>Adresi Kopyala</translation> </message> <message> <source>Copy label</source> <translation>Etiketi kopyala</translation> </message> <message> <source>Copy amount</source> <translation>Miktarı kopyala</translation> </message> <message> <source>Copy transaction ID</source> <translation>İşlem Kimliğini kopyala</translation> </message> <message> <source>Lock unspent</source> <translation>Harcanmamış tutarı kilitle</translation> </message> <message> <source>Unlock unspent</source> <translation>Harcanmamış tutarın kilidini aç</translation> </message> <message> 
<source>Copy quantity</source> <translation>Miktarı kopyala</translation> </message> <message> <source>Copy fee</source> <translation>Ücreti kopyala</translation> </message> <message> <source>Copy after fee</source> <translation>Ücret sonrasını kopyala</translation> </message> <message> <source>Copy bytes</source> <translation>bytes kopyala</translation> </message> <message> <source>Copy priority</source> <translation>öncelik kopyala</translation> </message> <message> <source>Copy dust</source> <translation>dust kopyala</translation> </message> <message> <source>Copy change</source> <translation>fark kopyala</translation> </message> <message> <source>Unselect all</source> <translation>Tüm seçimleri kaldır</translation> </message> <message> <source>Please switch to "List mode" to use this function.</source> <translation>Bu fonksiyonu kullanmak için "Liste moduna" geçin.</translation> </message> <message> <source>highest</source> <translation>en üst</translation> </message> <message> <source>higher</source> <translation>üstün</translation> </message> <message> <source>high</source> <translation>üst</translation> </message> <message> <source>medium-high</source> <translation>orta üst</translation> </message> <message> <source>medium</source> <translation>orta</translation> </message> <message> <source>low-medium</source> <translation>az orta</translation> </message> <message> <source>low</source> <translation>az</translation> </message> <message> <source>lower</source> <translation>daha az</translation> </message> <message> <source>lowest</source> <translation>en az</translation> </message> <message> <source>(%1 locked)</source> <translation>(%1 kilitlendi)</translation> </message> <message> <source>none</source> <translation>hiçbiri</translation> </message> <message> <source>yes</source> <translation>evet</translation> </message> <message> <source>no</source> <translation>hayır</translation> </message> <message> <source>This label turns red, if the transaction size is greater than 1000 bytes.</source> <translation>İşlem boyutu 1000 bayttan büyükse bu etiket kırmızıya döner.</translation> </message> <message> <source>This means a fee of at least %1 per kB is required.</source> <translation>Bu, kB başına en az %1 'lik bir ücret gerektiği anlamına gelir.</translation> </message> <message> <source>Can vary +/- 1 byte per input.</source> <translation>Giriş başına +/- 1 bayt değişebilir.</translation> </message> <message> <source>Transactions with higher priority are more likely to get included into a block.</source> <translation>Daha yüksek önceliğe sahip işlemlerin bir bloğa dahil olma olasılığı daha yüksektir.</translation> </message> <message> <source>This label turns red, if the priority is smaller than "medium".</source> <translation>Öncelik "orta" dan küçükse bu etiket kırmızıya döner.</translation> </message> <message> <source>This label turns red, if any recipient receives an amount smaller than %1.</source> <translation>Herhangi bir alıcı %1 'den küçük bir miktar alırsa, bu etiket kırmızıya döner.</translation> </message> <message> <source>Can vary +/- %1 upiv per input.</source> <translation>Her giriş için +/- %1 upiv farkedebilir.</translation> </message> <message> <source>(no label)</source> <translation>(etiket yok)</translation> </message> <message> <source>change from %1 (%2)</source> <translation>%1 'den (%2) değişim</translation> </message> <message> <source>(change)</source> <translation>(değişiklik)</translation> </message> </context> <context> <name>CoinControlPivWidget</name> 
<message> <source>Dialog</source> <translation>Diyalog</translation> </message> <message> <source>Transaction Details</source> <translation>İşlem Detayları</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>PIV:</source> <translation>PIV:</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>Quantity:</source> <translation>Miktar:</translation> </message> <message> <source>Fee:</source> <translation>Ücret:</translation> </message> <message> <source>Amount</source> <translation>Miktar</translation> </message> <message> <source>Label</source> <translation>Etiket</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message> <source>Type</source> <translation>Tür</translation> </message> <message> <source>Date</source> <translation>Tarih</translation> </message> <message> <source>Confirmations</source> <translation>Onaylar</translation> </message> <message> <source>CheckBox</source> <translation>CheckBox</translation> </message> <message> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <source>Dust:</source> <translation>Dust:</translation> </message> <message> <source>Change:</source> <translation>Fark:</translation> </message> <message> <source>After Fee: </source> <translation>Ücret sonrası:</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> </context> <context> <name>ColdStakingWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Title</source> <translation>Başlık</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Owner Address (optional, if empty a new address will be created)</source> <translation>Sahip Adresi (opsiyonel, boş bırakılırsa yeni adres yaratılır)</translation> </message> <message> <source>List of delegated balance by address</source> <translation>Delege edilmiş tutarların adrese göre listesi </translation> </message> <message> <source>Total</source> <translation>Toplam</translation> </message> <message> <source>Unconfirmed balance will not be shown</source> <translation>Onaylanmamış bakiye gösterilmez</translation> </message> <message> <source>No balance delegated</source> <translation>Delege edilmiş tutar bulunmuyor</translation> </message> <message> <source>Cold Staking</source> <translation>Soğuk Stakeleme</translation> </message> <message> <source>Staker</source> <translation>Stakeleyen</translation> </message> <message> <source>Delegation</source> <translation>Delegasyon</translation> </message> <message> <source>You can delegate your PIVs and let a hot node (24/7 online node) stake in your behalf, keeping the keys in a secure place offline.</source> <translation>Anahtarlarınızı çevrimdışı güvenli bir yerde saklarken, PIV'lerinizi delege edebilir ve bir sıcak düğümün (7/24 çevrimiçi düğüm) sizin adınıza stake yapmasını sağlayabilirsiniz.</translation> </message> <message> <source>Add owner address</source> <translation>Sahip adresi ekle</translation> </message> <message> <source>Delegate or Accept MXT delegation</source> <translation>Delege et veya MXT delegasyonu kabul et</translation> </message> <message> <source>Delegate</source> 
<translation>Delege et</translation> </message> <message> <source>Clear All</source> <translation>Hepsini temizle</translation> </message> <message> <source>Add the staking address</source> <translation>Stakeleme adresi ekle</translation> </message> <message> <source>Delegated balance history</source> <translation>Delege edilmiş tutar tarihçesi</translation> </message> <message> <source>No delegations yet</source> <translation>Henüz bir delegasyon bulunmuyor</translation> </message> <message> <source>Total Staking: %1</source> <translation>Toplam Stakeleme: %1</translation> </message> <message> <source>Error loading delegations: %1</source> <translation>Delegasyonların yüklenmesinde hata: %1</translation> </message> <message> <source>No contacts available, you can go to the contacts screen and add some there!</source> <translation>Kayıtlı kişi bulunmuyor, kişiler ekranına giderek oradan kişi ekleyebilirsiniz!</translation> </message> <message> <source>Cold staking is networkely disabled</source> <translation>Soğuk Stakeleme ağ üzerinde etkin değil</translation> </message> <message> <source>Invalid entry</source> <translation>Geçersiz giriş</translation> </message> <message> <source>Invalid entry, minimum delegable amount is 10 PIV</source> <translation>Geçersiz giriş, delege edilebilir minimum tutar 10 PIV</translation> </message> <message> <source>Owner address invalid</source> <translation>Sahip adresi geçersiz</translation> </message> <message> <source>ALERT!</source> <translation>DİKKAT!</translation> </message> <message> <source>Delegating to an external owner address! The delegated coins will NOT be spendable by this wallet. Spending these coins will need to be done from the wallet or device containing the owner address. Do you wish to proceed?</source> <translation>Dış bir adrese delege ediliyor! Delege edilmiş koinler bu cüzdan tarafından harcanamaz. Bu koinler sahip adresinin olduğu cüzdan veya cihaz tarafından harcanabilir. 
Devam etmek istiyor musunuz?</translation> </message> <message> <source>Staking address corresponds to this wallet, change it to an external node</source> <translation>Stakeleme adresi bu cüzdana karşılık gelmektedir, dış bir düğüme değiştir</translation> </message> <message> <source>Cannot create transaction.</source> <translation>İşlem yaratılamıyor.</translation> </message> <message> <source>Coins delegated</source> <translation>Koinler delege edildi</translation> </message> <message> <source>You don't have any MXT to select.</source> <translation>Seçebileceğiniz PIV'iniz bulunmuyor</translation> </message> <message> <source>URI copied to clipboard</source> <translation>URI panoya kopyalandı</translation> </message> <message> <source>Address copied to clipboard</source> <translation>Adres panoya kopyalandı</translation> </message> <message> <source>Copy</source> <translation>Kopyala</translation> </message> <message> <source>Edit</source> <translation>Güncelle</translation> </message> <message> <source>Stake</source> <translation>Stake</translation> </message> <message> <source>Blacklist</source> <translation>Karaliste</translation> </message> <message> <source>Edit Label</source> <translation>Etiketi Düzenle</translation> </message> <message> <source>Copy owner address</source> <translation>Sahip adresi kopyala</translation> </message> <message> <source>Address copied</source> <translation>Adres kopyalandı</translation> </message> <message> <source>Edit Cold Address Label</source> <translation>Soğuk Adresin Etiketini Düzenle</translation> </message> <message> <source>Whitelist failed, please check the logs</source> <translation>Beyazliste başarısız oldu, logları kontrol ediniz</translation> </message> <message> <source> staking!</source> <translation>Stakeleniyor!</translation> </message> <message> <source>Blacklist failed, please check the logs</source> <translation>Karaliste başarısız oldu, logları kontrol ediniz</translation> </message> <message> <source> blacklisted from staking</source> <translation>Stakeleme için karalisteye alındı</translation> </message> <message> <source>Owner address copied</source> <translation>Sahip adresi kopyalandı</translation> </message> <message> <source>Edit Owner Address Label</source> <translation>Sahip Adresinin Etiketini Düzenle</translation> </message> <message> <source>Address label saved</source> <translation>Adres etiketi kaydedildi</translation> </message> <message> <source>Error storing address label</source> <translation>Adres etiketini kaydederken hata oluştu</translation> </message> </context> <context> <name>ContactDropdownRow</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Bob Allen</source> <translation>Bob Allen</translation> </message> <message> <source>DN6i46dytMPVhV1JMGZFuQBh7BZZ6nNLox</source> <translation>DN6i46dytMPVhV1JMGZFuQBh7BZZ6nNLox</translation> </message> </context> <context> <name>DashboardWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Transactions</source> <translation>İşlemler</translation> </message> <message> <source>You can see here the history of your account</source> <translation>Burada hesabınızın geçmişini görebilirsiniz.</translation> </message> <message> <source>Warning</source> <translation>Uyarı</translation> </message> <message> <source>No transactions</source> <translation>İşlem bulunmuyor</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> 
</message> <message> <source>Staking Rewards</source> <translation>Stake ödülleri</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>Staking statistics</source> <translation>Stake İstatistikleri</translation> </message> <message> <source>PIV</source> <translation>PIV</translation> </message> <message> <source>zMXT</source> <translation>zMXT</translation> </message> <message> <source>Sort by</source> <translation>Sırala</translation> </message> <message> <source>Days</source> <translation>Gün</translation> </message> <message> <source>Months</source> <translation>Ay</translation> </message> <message> <source>Years</source> <translation>Yıl</translation> </message> <message> <source>Filter by</source> <translation>Filtrele</translation> </message> <message> <source>LabelText</source> <translation>LabelText</translation> </message> <message> <source>You can view your account's history</source> <translation>Hesap geçmişinizde görebilirsiniz</translation> </message> <message> <source>Amount of MXT and zMXT staked.</source> <translation>Stake edilmiş MXT ve zMXT miktarı</translation> </message> <message> <source>All</source> <translation>Tümü</translation> </message> <message> <source>Received</source> <translation>Alındı</translation> </message> <message> <source>Sent</source> <translation>Gönderildi</translation> </message> <message> <source>Mined</source> <translation>Mined </translation> </message> <message> <source>Minted</source> <translation>Mint edildi </translation> </message> <message> <source>MN reward</source> <translation>MN Ödülü</translation> </message> <message> <source>To yourself</source> <translation>Kendine</translation> </message> <message> <source>Cold stakes</source> <translation>Soğuk Stakeler</translation> </message> <message> <source>Hot stakes</source> <translation>Sıcak Stakeler</translation> </message> <message> <source>Delegated</source> <translation>Delege edilmiş</translation> </message> <message> <source>Delegations</source> <translation>Delegasyonlar</translation> </message> <message> <source>Please wait until the wallet is fully synced to see your correct balance</source> <translation>Doğru bakiyenizi görmek için cüzdanınızın senkronizasyonunun tamamlanmasını bekleyiniz.</translation> </message> <message> <source>No transactions yet</source> <translation>İşlem bulunmuyor</translation> </message> <message> <source>How to get MXT or zMXT</source> <translation>PIV veya zMXT nasıl alınır</translation> </message> <message> <source>You can verify the staking activity in the status bar at the top right of the wallet. 
It will start automatically as soon as the wallet has enough confirmations on any unspent balances, and the wallet has synced.</source> <translation>Cüzdanınızın sağ üst tarafındaki durum çubuğundan stake aktivitelerinizi denetleyebilirsiniz Cüzdanınız senkronize olduktan ve harcanmamış bakiyeniz yeterli miktarda onaya ulaştıktan sonra staking otomatik olarak başlayacaktır.</translation> </message> <message> <source>You have no staking rewards</source> <translation>Henüz stake ödülünüz bulunmuyor.</translation> </message> <message> <source>No charts library</source> <translation>Grafik kitaplığı yok</translation> </message> <message> <source>Loading chart..</source> <translation>Grafik yükleniyor</translation> </message> <message> <source>Error loading chart, invalid show option</source> <translation>Grafik yükleme hatası, geçersiz gösterim seçeneği</translation> </message> <message> <source>Error loading chart, invalid data</source> <translation>Grafik yükleme hatası, geçersiz bilgi</translation> </message> <message> <source>Error loading chart: %1</source> <translation>Grafik yükleme hatası: %1</translation> </message> </context> <context> <name>DefaultDialog</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Transaction Details</source> <translation>İşlem Detayları</translation> </message> <message> <source>Text Label</source> <translation>Etiket Yazısı</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>SAVE</source> <translation>KAYDET</translation> </message> </context> <context> <name>DenomGenerationDialog</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Transaction Details</source> <translation>İşlem Detayları</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>This will unlock your wallet fully, so that anyone with access to it can spend until the wallet is closed or locked again.</source> <translation>Bu cüzdanınızın kilidini kaldıracaktır, dolayısıyla erişimi olan herkes cüzdan kapatılana kadar veya tekrar kilitlenene kadar harcama yapabilir.</translation> </message> <message> <source>5000</source> <translation>5000</translation> </message> <message> <source>100</source> <translation>100</translation> </message> <message> <source>10</source> <translation>10</translation> </message> <message> <source>1000</source> <translation>1000</translation> </message> <message> <source>50</source> <translation>50</translation> </message> <message> <source>500</source> <translation>500</translation> </message> <message> <source>5</source> <translation>5</translation> </message> <message> <source>1</source> <translation>1</translation> </message> <message> <source>Select All</source> <translation>Tümünü Seç</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>SAVE</source> <translation>KAYDET</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <source>Edit Address</source> <translation>Adres Düzenle</translation> </message> <message> <source>&amp;Label</source> <translation>&amp;Etiket</translation> </message> <message> <source>The label associated with this address list entry</source> <translation>Bu adres listesi girişiyle ilişkilendirilen etiket</translation> </message> <message> <source>&amp;Address</source> 
<translation>&amp;Adres</translation> </message> <message> <source>The address associated with this address list entry. This can only be modified for sending addresses.</source> <translation>Bu adres listesi girişiyle ilişkili adres. Bu, yalnızca adres göndermek için değiştirilebilir.</translation> </message> <message> <source>New receiving address</source> <translation>Yeni alıcı adresi</translation> </message> <message> <source>New sending address</source> <translation>Yeni gönderen adres</translation> </message> <message> <source>Edit receiving address</source> <translation>Alma adresi düzenle</translation> </message> <message> <source>Edit sending address</source> <translation>Gönderen adresini düzenle</translation> </message> <message> <source>The entered address "%1" is not a valid PIVX address.</source> <translation>Girilen "%1" adresi geçerli bir PIVX adresi değil.</translation> </message> <message> <source>The entered address "%1" is already in the address book.</source> <translation>Girilen "%1" adresi zaten adres defterinde.</translation> </message> <message> <source>Could not unlock wallet.</source> <translation>cüzdan kilidi açılamadı.</translation> </message> <message> <source>New key generation failed.</source> <translation>Yeni anahtar üretimi başarısız oldu.</translation> </message> </context> <context> <name>ExpandableButton</name> <message> <source>Form</source> <translation>Form</translation> </message> </context> <context> <name>FreespaceChecker</name> <message> <source>A new data directory will be created.</source> <translation>Yeni bir veri dizini oluşturulacak</translation> </message> <message> <source>name</source> <translation>isim</translation> </message> <message> <source>Directory already exists. Add %1 if you intend to create a new directory here.</source> <translation>Dizin zaten var. 
Burada yeni bir dizin oluşturmak istiyorsanız %1 ekleyin.</translation> </message> <message> <source>Path already exists, and is not a directory.</source> <translation>Yol zaten var ve bir dizin değil.</translation> </message> <message> <source>Cannot create data directory here.</source> <translation>Burada veri dizini oluşturulamıyor.</translation> </message> </context> <context> <name>GovernancePage</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>GOVERNANCE</source> <translation>YÖNETİM</translation> </message> <message> <source>Update Proposals</source> <translation>Teklifleri Güncelle</translation> </message> <message> <source>Next super block:</source> <translation>Sıradaki süper blok</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>Blocks to next super block:</source> <translation>Sıradaki süper bloğa kadarki blok sayısı:</translation> </message> <message> <source>Days to budget payout (estimate):</source> <translation>Bütçe ödemesine kalan gün (tahmini):</translation> </message> <message> <source>Allotted budget:</source> <translation>Ayrılmış Bütçe:</translation> </message> <message> <source>Budget left:</source> <translation>Kalan Bütçe:</translation> </message> <message> <source>Masternodes count:</source> <translation>Masternode Sayısı:</translation> </message> </context> <context> <name>HelpMessageDialog</name> <message> <source>version</source> <translation>versiyon</translation> </message> <message> <source>PIVX Core</source> <translation>PIVX CORE</translation> </message> <message> <source>(%1-bit)</source> <translation>(%1-bit)</translation> </message> <message> <source>About PIVX Core</source> <translation>PIVX Core Hakkında </translation> </message> <message> <source>Command-line options</source> <translation>Command-line seçenekleri</translation> </message> <message> <source>Usage:</source> <translation>Kullanım:</translation> </message> <message> <source>command-line options</source> <translation>Komut-istemi seçenekleri:</translation> </message> <message> <source>UI Options:</source> <translation>Kullanıcı arayüzü seçenekleri:</translation> </message> <message> <source>Choose data directory on startup (default: %u)</source> <translation>Başlangıçta veri dizini seçin (varsayılan: %u)</translation> </message> <message> <source>Show splash screen on startup (default: %u)</source> <translation>Başlangıçta açılış ekranı göster (varsayılan: %u)</translation> </message> <message> <source>Set language, for example "de_DE" (default: system locale)</source> <translation>Dil ayarla, örneğin "de_DE" (varsayılan: sistem yerel dili)</translation> </message> <message> <source>Start minimized</source> <translation>Küçültülmüş olarak başlat</translation> </message> <message> <source>Set SSL root certificates for payment request (default: -system-)</source> <translation>Ödeme talebi için SSL kök sertifikaları belirleyin (default: -system-)</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> </context> <context> <name>Intro</name> <message> <source>Welcome</source> <translation>Hoşgeldiniz</translation> </message> <message> <source>Welcome to PIVX Core.</source> <translation>PIVX Core'a hoşgeldiniz.</translation> </message> <message> <source>As this is the first time the program is launched, you can choose where PIVX Core will store its data.</source> <translation>Program ilk başlatıldığında PIVX Core'un verilerini nerede 
saklayacağını seçebilirsiniz.</translation> </message> <message> <source>PIVX Core will download and store a copy of the PIVX block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source> <translation>PIVX Core, PIVX blok zincirinin bir kopyasını indirecek ve depolayacaktır. Bu dizinde en az %1GB veri saklanacak ve zamanla büyüyecektir. Ayrıca cüzdanınız da bu dizinde saklanır.</translation> </message> <message> <source>Use the default data directory</source> <translation>Varsayılan veri dizinini kullanınız</translation> </message> <message> <source>Use a custom data directory</source> <translation>Özel bir dizin kullan</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>PIVX Core</source> <translation>PIVX Core</translation> </message> <message> <source>Error: Specified data directory "%1" cannot be created.</source> <translation>Hata: Belirtilen veri dizini "%1" oluşturulamıyor.</translation> </message> <message> <source>Error</source> <translation>hata</translation> </message> <message> <source>%1 GB of free space available</source> <translation>%1 GB boş alan mevcut</translation> </message> <message> <source>(of %1 GB needed)</source> <translation>( %1 i GB gerekli)</translation> </message> </context> <context> <name>LoadingDialog</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>Loading</source> <translation>Yükleniyor</translation> </message> <message> <source>.</source> <translation>.</translation> </message> </context> <context> <name>LockUnlock</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Unlock Wallet</source> <translation>Cüzdanın Kilidini Aç</translation> </message> <message> <source>Lock Wallet</source> <translation>Cüzdanı Kilitle</translation> </message> <message> <source>Staking Only</source> <translation>Sadece Stake için</translation> </message> </context> <context> <name>MNRow</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>user_masternode</source> <translation>kullanıcı_masternode</translation> </message> <message> <source>Address: 88.26.164.88:51315</source> <translation>Adres: 88.26.164.88:51315</translation> </message> <message> <source>Jan. 19, 2019</source> <translation>Ocak. 
12,2019</translation> </message> </context> <context> <name>MasterNodeWizardDialog</name> <message> <source>Dialog</source> <translation>Diyalog</translation> </message> <message> <source>1</source> <translation>1</translation> </message> <message> <source>2</source> <translation>2</translation> </message> <message> <source>3</source> <translation>3</translation> </message> <message> <source>Intro</source> <translation>Giriş</translation> </message> <message> <source>Name</source> <translation>İsim</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message> <source>Create New Master Node Controller</source> <translation>Yeni bir Masternode Kontrolörü Yarat</translation> </message> <message> <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;To create a PIVX Masternode you must dedicate 5.000 MXT (the unit of PIVX) to the network (however, these coins are still yours and will never leave your possession). &lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;You can deactivate the node and unlock the coins at any time.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source> <translation>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;Yeni bir Masternode yaratmak için ağa 5.000 MXT (PIVX birimi) ayırmanız gerekmektedir (buna rağmen koinler hala size aittir ve sahipliğini kororsunuz). &lt;/p&gt;&lt;p&gt;&lt;/p&gt;&lt;p&gt;İstediğiniz zaman devre dışı bırakabilir ve koinlerinizin kilidini kaldırabilirsiniz.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</translation> </message> <message> <source>Make sure you have this amount of coins.</source> <translation>Bu miktarda koininizin olduğundan emin olunuz.</translation> </message> <message> <source>Set Master Node’s Name</source> <translation>Masternode'unuzun Adını Belirleyin</translation> </message> <message> <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;A transaction of 5,000 MXT will be made&lt;/p&gt;&lt;p&gt;to a new empty address in your wallet.&lt;/p&gt;&lt;p&gt;The Address is labeled under the master node's name.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source> <translation>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;5.000 MXT'lik bir transfer &lt;/p&gt;&lt;p&gt;yeni ve boş bir adrese yapılacaktır.&lt;/p&gt;&lt;p&gt;Adres Masternode'unuz ismi ile etiketlenecektir.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</translation> </message> <message> <source>Set Master Node’s IP and port</source> <translation>MasterNode'unuzun IP ve Portunu giriniz</translation> </message> <message> <source>Address of the node that must always be online running the actual master node.</source> <translation>Gerçek Masternode'un çalıştığı adres herzaman online olmak zorundadır.</translation> </message> <message> <source>Enter IP address</source> <translation>IP adresini girin</translation> </message> <message> <source>Enter Port</source> <translation>Port'u girin</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>e.g user_masternode</source> <translation>örn. 
kullanıcı_masternode</translation> </message> <message> <source>NEXT</source> <translation>İLERİ</translation> </message> <message> <source>BACK</source> <translation>GERİ</translation> </message> <message> <source>address label cannot be empty</source> <translation>Adres etiketi boş olamaz</translation> </message> <message> <source>IP or port cannot be empty</source> <translation>IP ve Port boş olamaz</translation> </message> <message> <source>Invalid port number</source> <translation>Geçersiz port numarası</translation> </message> <message> <source>Prepare master node failed..</source> <translation>Masternode hazırlığı başarısız..</translation> </message> <message> <source>Invalid masternode.conf file</source> <translation>Geçersiz masternode.conf dosyası</translation> </message> <message> <source>Error parsing masternode.conf file</source> <translation> masternode.conf dosyası çözümleme hatası</translation> </message> <message> <source>Invalid collaterall output index</source> <translation>Geçersiz teminat çıktı endeksi</translation> </message> <message> <source>Master node created!</source> <translation>Masternode yaratıldı!</translation> </message> <message> <source>masternode.conf file doesn't exists</source> <translation> masternode.conf dosyası bulunamadı</translation> </message> <message> <source>The recipient address is not valid, please recheck.</source> <translation>Alıcı adresi geçerli değil, lütfen tekrar kontrol edin.</translation> </message> <message> <source>The amount to pay must be larger than 0.</source> <translation>Ödenecek tutar 0'dan büyük olmalıdır.</translation> </message> <message> <source>The amount exceeds your balance.</source> <translation>Miktar, bakiyenizi aşıyor.</translation> </message> <message> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Toplam, %1 işlem ücreti dahil edildiğinde bakiyenizi aşıyor.</translation> </message> <message> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Yinelenen adres bulundu, her gönderme işleminde aynı adres birden fazla girilemez.</translation> </message> <message> <source>Transaction creation failed!</source> <translation>İşlem oluşturulamadı!</translation> </message> <message> <source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>İşlem reddedildi! Bu, cüzdanındaki koinlerin bir kısmının harcanması durumunda olabilir; mesela, wallet.dat dosyasının bir kopyasını kullandıysanız ve koinlerinizi harcamış fakat cüzdana harcanmış olarak işaretlenmemişse ortaya çıkabilir.</translation> </message> <message> <source>Error: The wallet was unlocked only to anonymize coins.</source> <translation>Hata: Cüzdan sadece paraları anonimleştirmek için açıldı.</translation> </message> <message> <source>A fee %1 times higher than %2 per kB is considered an insanely high fee.</source> <translation>Bir ücret, kB başına %2'nin %1 katından daha yüksektir, delicesine yüksek bir ücret olarak kabul edilir.</translation> </message> <message> <source>Error: The wallet was unlocked only to anonymize coins. Unlock canceled.</source> <translation>Hata: Cüzdan sadece paraları anonimleştirmek için açıldı. 
Kilidini açma işlemi iptal edildi.</translation> </message> </context> <context> <name>MasterNodesWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>No active Master Node yet</source> <translation>Henüz aktif Masternode bulunmuyor</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Masternodes</source> <translation>Masternode'lar</translation> </message> <message> <source>Full nodes that incentivize node operators to perform the core consensus functions and vote on the treasury system receiving a periodic reward.</source> <translation>Düğüm operatörlerinin temel konsensüs fonksiyonlarını teşvik eden tam düğümler hazine sisteminde oy kullanarak düzenli ödüller kazanırlar. </translation> </message> <message> <source>Create Masternode Controller</source> <translation>Masternode Kontrolörü Yarat</translation> </message> <message> <source>No active Masternode yet</source> <translation>Henüz Aktif Masternode bulunmuyor</translation> </message> <message> <source>Start</source> <translation>Başlat</translation> </message> <message> <source>Delete</source> <translation>Sil</translation> </message> <message> <source>Info</source> <translation>Bilgi</translation> </message> <message> <source>Start Masternode</source> <translation>Masternode'u başlat</translation> </message> <message> <source>Are you sure you want to start masternode %1? </source> <translation>Masternode %1 'i başlatmak istediğinize emin misiniz? </translation> </message> <message> <source>Cannot start masternode, the collateral transaction has not been accepted by the network. Please wait few more minutes.</source> <translation>Masternode başlatılamadı, teminat transferi henüz ağ tarafından kabul edilmedi Lütfen birkaç dakika bekleyiniz.</translation> </message> <message> <source>Remote Masternode Data</source> <translation>Uzak Masternode Bilgisi</translation> </message> <message> <source>You are just about to export the required data to run a Masternode on a remote server to your clipboard. You will only have to paste the data in the pivx.conf file of your remote server and start it, then start the Masternode using this controller wallet (select the Masternode in the list and press "start"). </source> <translation>Uzak bir sunucuda Masternode çalıştırabilmek için gereken bilgiyi panoya almak üzeresiniz. Bilgiyi uzak sunucunuzdaki pivx.conf dosyasına yapıştırmanız ve başlatmanız gerekecektir, ondan sonra bu kontrol cüzdanını kullanan Masternodu başlatın (Listeden Masternodu seçip "başlat" tuşuna basın). </translation> </message> <message> <source>Masternode exported!, check your clipboard</source> <translation>Masternode bilgisi dışarı alındı! 
Panoyu kontrol edin</translation> </message> <message> <source>Delete Masternode</source> <translation>Masternode'u Sil</translation> </message> <message> <source>You are just about to delete Masternode: %1 Are you sure?</source> <translation>Masternode'u silmek üzeresiniz %1 Emin misiniz?</translation> </message> <message> <source>Invalid masternode.conf file</source> <translation>Geçersiz masternode.conf dosyası</translation> </message> <message> <source>Error parsing masternode.conf file</source> <translation> masternode.conf dosyası çözümleme hatası</translation> </message> <message> <source>masternode.conf file doesn't exists</source> <translation> masternode.conf dosyası bulunamadı</translation> </message> <message> <source>Not enough balance to create a masternode, 5,000 MXT required.</source> <translation>Masternode yaratmak için yetersiz bakiye, 5,000 MXT gerekmektedir.</translation> </message> <message> <source>Error creating masternode</source> <translation>Masternode yaratma hatası</translation> </message> </context> <context> <name>MnInfoDialog</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Master Node Information</source> <translation>Masternode Bilgisi</translation> </message> <message> <source>Public Key:</source> <translation>Public Key:</translation> </message> <message> <source>492526e7fa3c810b35016...40a5df85ee227ab00b1156994</source> <translation>492526e7fa3c810b35016...40a5df85ee227ab00b1156994</translation> </message> <message> <source>Address: </source> <translation>Adres: </translation> </message> <message> <source>127.0.0.2:43223</source> <translation>127.0.0.2:43223</translation> </message> <message> <source>Collaterall tx id:</source> <translation>Teminat tx id:</translation> </message> <message> <source>Output index:</source> <translation>Çıktı Endeksi:</translation> </message> <message> <source>1 Inputs</source> <translation>1 girdi</translation> </message> <message> <source>Status:</source> <translation>Durum:</translation> </message> <message> <source>MISSING</source> <translation>EKSİK</translation> </message> <message> <source>Export data to run the Master Node on a remote server</source> <translation>Uzak sunucuda Masternode çalıştırmak için bilgiyi dışarı aktar</translation> </message> </context> <context> <name>MyAddressRow</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Savings</source> <translation>Birikimler</translation> </message> <message> <source>Jan. 19, 2019</source> <translation>Ocak. 
12,2019</translation> </message> <message> <source>DN6i46dytMPVhV1JMGZFuQBh7BZZ6nNLox</source> <translation>DN6i46dytMPVhV1JMGZFuQBh7BZZ6nNLox</translation> </message> </context> <context> <name>NavMenuWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>V 1.0.0</source> <translation>V 1.0.0</translation> </message> <message> <source>v%1</source> <translation>v%1</translation> </message> </context> <context> <name>OpenURIDialog</name> <message> <source>Open URI</source> <translation>Open URI</translation> </message> <message> <source>Open payment request from URI or file</source> <translation>URI veya dosyadan ödeme isteğini aç</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>Select payment request file</source> <translation>Ödeme isteği dosyasını seçin</translation> </message> <message> <source>Select payment request file to open</source> <translation>Açılması gereken ödeme isteği dosyasını seçin</translation> </message> <message> <source>File not found</source> <translation>Dosya bulunamadı</translation> </message> <message> <source>Parsed data too large</source> <translation>Ayrıştırılmış bilgi çok büyük</translation> </message> <message> <source>Invalid URI, not starting with "pivx" prefix</source> <translation>Geçersiz URI, "pivx" ön adıyla başlamıyor</translation> </message> </context> <context> <name>OptionButton</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <source>Options</source> <translation>Seçenekler</translation> </message> <message> <source>&amp;Main</source> <translation>&amp;Main</translation> </message> <message> <source>Size of &amp;database cache</source> <translation>&amp; Veritabanı önbellek boyutu</translation> </message> <message> <source>MB</source> <translation>MB</translation> </message> <message> <source>Number of script &amp;verification threads</source> <translation>Komut dosyası ve doğrulama iş parçacığı sayısı</translation> </message> <message> <source>(0 = auto, &lt;0 = leave that many cores free)</source> <translation>(0 = auto, &lt;0 = birçok çekirdeği serbest bırak)</translation> </message> <message> <source>W&amp;allet</source> <translation>Cüzdan</translation> </message> <message> <source>If you disable the spending of unconfirmed change, the change from a transaction&lt;br/&gt;cannot be used until that transaction has at least one confirmation.&lt;br/&gt;This also affects how your balance is computed.</source> <translation>Doğrulanmamış değişikliğin harcamasını devre dışı bırakırsanız, bir işlemdeki değişiklik&lt;br/&gt;Bu işlem en az bir onay alıncaya kadar kullanılamaz.&lt;br/&gt;Bu aynı zamanda bakiyenizin nasıl hesaplandığını etkiler.</translation> </message> <message> <source>Automatically open the PIVX client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Yönelticideki PIVX istemci bağlantı noktasını otomatik olarak açın. 
Bu, yalnızca yönlendiriciniz UPnP'yi destekliyorsa ve etkinleştirilmişse çalışır.</translation> </message> <message> <source>Accept connections from outside</source> <translation>Dışarıdan bağlantıları kabul et</translation> </message> <message> <source>Allow incoming connections</source> <translation>Gelen bağlantılara izin ver</translation> </message> <message> <source>&amp;Connect through SOCKS5 proxy (default proxy):</source> <translation>&amp;Connect through SOCKS5 proxy (default proxy):</translation> </message> <message> <source>Expert</source> <translation>Uzman</translation> </message> <message> <source>Automatically start PIVX after logging in to the system.</source> <translation>Sisteme giriş yaptıktan sonra otomatik olarak PIVX'i başlatın.</translation> </message> <message> <source>&amp;Start PIVX on system login</source> <translation>&amp;Sistem oturumu açıldığında PIVX'i başlat</translation> </message> <message> <source>Whether to show coin control features or not.</source> <translation>Para kontrol özelliklerini gösterip göstermeyeceği.</translation> </message> <message> <source>Enable coin &amp;control features</source> <translation>Koin &amp;kontrol özelliklerini etkinleştir</translation> </message> <message> <source>Show additional tab listing all your masternodes in first sub-tab&lt;br/&gt;and all masternodes on the network in second sub-tab.</source> <translation>Tüm masternode'larınızı ilk alt sekmede&lt;br/&gt;ve ağdaki tüm masternode'ları ikinci alt sekmede listeleyen ek sekmeyi göster.</translation> </message> <message> <source>Show Masternodes Tab</source> <translation>Masternode'lar Sekmesini Göster </translation> </message> <message> <source>&amp;Spend unconfirmed change</source> <translation>Doğrulanmamış değişikliği harcama</translation> </message> <message> <source>&amp;Network</source> <translation>&amp;Ağ</translation> </message> <message> <source>The user interface language can be set here. This setting will take effect after restarting PIVX.</source> <translation>Kullanıcı arabirimi dili burada ayarlanabilir. Bu ayar, PIVX yeniden başlatıldıktan sonra geçerli olur.</translation> </message> <message> <source>Language missing or translation incomplete? Help contributing translations here: https://www.transifex.com/pivx-project/pivx-project-translations</source> <translation>Dil eksik veya çeviri tamamlanmamış mı? 
Katkıda bulunan çevirilere yardımcı olun: https://www.transifex.com/pivx-project/pivx-project-translations</translation> </message> <message> <source>Map port using &amp;UPnP</source> <translation>Map port using &amp;UPnP</translation> </message> <message> <source>Enable automatic minting of MXT units to zMXT</source> <translation>MXT birimlerinin zMXT'e otomatik basımını etkinleştir</translation> </message> <message> <source>Enable zMXT Automint</source> <translation>zMXT otomatik basımını etkinleştir</translation> </message> <message> <source>Enable automatic zMXT minting from specific addresses</source> <translation>Belirli bir adresten otomatik zMXT minting yapılmasını etkinleştir</translation> </message> <message> <source>Enable Automint Addresses</source> <translation>Automint adreslerini etkinleştir</translation> </message> <message> <source>Percentage of incoming MXT which get automatically converted to zMXT via Zerocoin Protocol (min: 10%)</source> <translation>Zerocoin Protokolü üzerinden otomatik olarak zMXT'ye dönüştürülen gelen MXT yüzdesi (en az: %10)</translation> </message> <message> <source>Percentage of autominted zMXT</source> <translation>Otomatikleştirilmiş zMXT yüzdesi</translation> </message> <message> <source>Wait with automatic conversion to Zerocoin until enough MXT for this denomination is available</source> <translation>Bu nominal değer için yeterli MXT birikene kadar Zerocoin'e otomatik dönüştürmeyi bekletin</translation> </message> <message> <source>Preferred Automint zMXT Denomination</source> <translation>Tercih edilen Automint zMXT Nominal değeri</translation> </message> <message> <source>Stake split threshold:</source> <translation>Stake bölme eşiği:</translation> </message> <message> <source>Connect to the PIVX network through a SOCKS5 proxy.</source> <translation>Bir SOCKS5 proxy vasıtasıyla PIVX ağına bağlanın.</translation> </message> <message> <source>Proxy &amp;IP:</source> <translation>Proksi &amp;IP:</translation> </message> <message> <source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source> <translation>Proksinin IP adresi (örn. IPv4: 127.0.0.1 / IPv6: ::1)</translation> </message> <message> <source>&amp;Port:</source> <translation>&amp;Port:</translation> </message> <message> <source>Port of the proxy (e.g. 9050)</source> <translation>Proksinin portu (örn. 9050)</translation> </message> <message> <source>&amp;Window</source> <translation>&amp;Window</translation> </message> <message> <source>Show only a tray icon after minimizing the window.</source> <translation>Pencereyi küçülttükten sonra yalnızca bir tepsi simgesi gösterin.</translation> </message> <message> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>Görev çubuğu yerine tepsiye simge durumuna küçült</translation> </message> <message> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Pencere kapatıldığında uygulamadan çıkmak yerine simge durumuna küçültün. 
Bu seçenek etkinleştirildiğinde, uygulama yalnızca menüden Çık'ı seçtikten sonra kapatılacaktır.</translation> </message> <message> <source>M&amp;inimize on close</source> <translation>Kapama tuşuna basınca küçült</translation> </message> <message> <source>&amp;Display</source> <translation>&amp;Göster</translation> </message> <message> <source>User Interface &amp;language:</source> <translation>Kullanıcı Arayüzü ve dil:</translation> </message> <message> <source>User Interface Theme:</source> <translation>Kullanıcı Arabirimi Teması:</translation> </message> <message> <source>Unit to show amounts in:</source> <translation>Tutarların gösterileceği birim: </translation> </message> <message> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Koin gönderirken arayüzde görülmesi için varsayılan bölme miktarını seçin.</translation> </message> <message> <source>Decimal digits</source> <translation>Ondalık basamak</translation> </message> <message> <source>Hide empty balances</source> <translation>Boş bakiyeleri sakla</translation> </message> <message> <source>Hide orphan stakes in transaction lists</source> <translation>Kimsesiz stakeleri işlemler listesinde gizle</translation> </message> <message> <source>Hide orphan stakes</source> <translation>Kimsesiz Stake'leri gizle</translation> </message> <message> <source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source> <translation>İşlemler sekmesinde içerik menüsü öğeleri olarak görünen üçüncü taraf URL'leri (ör. Bir blok araştırıcısı). URL'deki %s yerine işlem karması verildi. Birden fazla URL, dikey çubukla | | ile ayrılır.</translation> </message> <message> <source>Third party transaction URLs</source> <translation>Üçüncü taraf işlem URL'leri</translation> </message> <message> <source>Active command-line options that override above options:</source> <translation>Yukarıdaki seçenekleri geçersiz kılan etkin komut satırı seçenekleri:</translation> </message> <message> <source>Reset all client options to default.</source> <translation>Tüm istemci seçeneklerini varsayılana sıfırlayın.</translation> </message> <message> <source>&amp;Reset Options</source> <translation>&amp;Ayarları sıfırla</translation> </message> <message> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <source>&amp;Cancel</source> <translation>&amp;İptal</translation> </message> <message> <source>Any</source> <translation>Herhangi</translation> </message> <message> <source>default</source> <translation>varsayılan</translation> </message> <message> <source>none</source> <translation>hiçbiri</translation> </message> <message> <source>Confirm options reset</source> <translation>Seçeneklerin sıfırlanmasını onayla</translation> </message> <message> <source>Client restart required to activate changes.</source> <translation>Değişiklikleri etkinleştirmek için programın yeniden başlatılması gerekiyor.</translation> </message> <message> <source>Client will be shutdown, do you want to proceed?</source> <translation>Cüzdan kapanacak, devam etmek istiyor musunuz?</translation> </message> <message> <source>This change would require a client restart.</source> <translation>Bu Değişiklik cüzdanın yeniden başlatılmasını gerektirecek.</translation> </message> <message> <source>The supplied proxy address is invalid.</source> <translation>Verilen proxy adresi 
geçersiz.</translation> </message> <message> <source>The supplied proxy port is invalid.</source> <translation>Verilen proxy port adresi geçersiz.</translation> </message> <message> <source>The supplied proxy settings are invalid.</source> <translation>Verilen proxy seçenekleri geçersiz.</translation> </message> </context> <context> <name>PIVXGUI</name> <message> <source>PIVX Core</source> <translation>PIVX CORE</translation> </message> <message> <source>Wallet</source> <translation>Cüzdan</translation> </message> <message> <source>Node</source> <translation>Düğüm</translation> </message> <message> <source>&amp;Show / Hide</source> <translation>Göster/Gizle</translation> </message> <message> <source>Show or hide the main Window</source> <translation>Ana pencereyi göster veya gizle</translation> </message> <message> <source>E&amp;xit</source> <translation>Çıkış</translation> </message> <message> <source>Quit application</source> <translation>Uygulamayı sonlandır</translation> </message> <message> <source>PIVX Core client</source> <translation>PIVX Core istemci</translation> </message> <message> <source>Error</source> <translation>Hata</translation> </message> <message> <source>Warning</source> <translation>Uyarı</translation> </message> <message> <source>Information</source> <translation>Bilgi</translation> </message> <message> <source>System Message</source> <translation>Sistem Mesajı</translation> </message> <message> <source>Sent MultiSend transaction</source> <translation>ÇokluGönderi işlemi gönderildi</translation> </message> <message> <source>Sent transaction</source> <translation>Gönderilmiş işlem</translation> </message> <message> <source>Incoming transaction</source> <translation>Gelen işlem</translation> </message> <message> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Tarih: %1 Miktar: %2 Tip: %3 Adres: %4 </translation> </message> <message> <source>A fatal error occurred. PIVX can no longer continue safely and will quit.</source> <translation>Onarılamaz bir hata oluştu. PIVX güvenli bir şekilde çalışamaz, kapanacaktır.</translation> </message> </context> <context> <name>PWidget</name> <message> <source>Wallet locked, you need to unlock it to perform this action</source> <translation>Cüzdan kilitli, bu işlemi yapabilmek için cüzdanın kilidini açmalısınız.</translation> </message> </context> <context> <name>PaymentServer</name> <message> <source>Payment request error</source> <translation>Ödeme isteği hatası</translation> </message> <message> <source>URI handling</source> <translation>URI handling</translation> </message> <message> <source>Payment request fetch URL is invalid: %1</source> <translation>Ödeme isteği getirme URL'si geçersiz: %1</translation> </message> <message> <source>Payment request file handling</source> <translation>Ödeme isteği dosyası işleme</translation> </message> <message> <source>Invalid payment address %1</source> <translation>Geçersiz ödeme adresi %1</translation> </message> <message> <source>Cannot start pivx: click-to-pay handler</source> <translation>Pivx başlatılamıyor: tıkla ve ödeme işleyicisi</translation> </message> <message> <source>URI cannot be parsed! This can be caused by an invalid PIVX address or malformed URI parameters.</source> <translation>URI ayrıştırılamaz! Bunun nedeni, geçersiz bir PIVX adresi veya hatalı biçimlendirilmiş URI parametrelerinden kaynaklanabilir.</translation> </message> <message> <source>Payment request file cannot be read! 
This can be caused by an invalid payment request file.</source> <translation>Ödeme talebi dosyası okunamıyor! Buna, geçersiz bir ödeme istek dosyası neden olabilir.</translation> </message> <message> <source>Payment request rejected</source> <translation>Ödeme talebi reddedildi</translation> </message> <message> <source>Payment request network doesn't match client network.</source> <translation>Ödeme isteği ağı müşteri ağıyla eşleşmiyor.</translation> </message> <message> <source>Payment request has expired.</source> <translation>Ödeme talebi zamanı doldu.</translation> </message> <message> <source>Payment request is not initialized.</source> <translation>Ödeme talebi başlatılmadı.</translation> </message> <message> <source>Unverified payment requests to custom payment scripts are unsupported.</source> <translation>Özel ödeme komut dosyalarına yönelik onaylanmamış ödeme istekleri desteklenmiyor.</translation> </message> <message> <source>Requested payment amount of %1 is too small (considered dust).</source> <translation>İstenen ödeme tutarı %1 çok küçük (toz olarak kabul edildi).</translation> </message> <message> <source>Refund from %1</source> <translation>%1'den geri ödeme</translation> </message> <message> <source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source> <translation>Ödeme isteği %1 çok büyük (%2 bayt, %3 bayta izin verildi).</translation> </message> <message> <source>Payment request DoS protection</source> <translation>Ödeme talebi DoS koruması</translation> </message> <message> <source>Error communicating with %1: %2</source> <translation>iletişim kurulurken hata oluştu %1: %2</translation> </message> <message> <source>Payment request cannot be parsed!</source> <translation>Ödeme talebi ayrıştırılamıyor!</translation> </message> <message> <source>Bad response from server %1</source> <translation>%1 sunucusundan gelen yanlış yanıt</translation> </message> <message> <source>Network request error</source> <translation>Ağ isteği hatası</translation> </message> <message> <source>Payment acknowledged</source> <translation>Ödeme kabul edildi</translation> </message> </context> <context> <name>PeerTableModel</name> <message> <source>Address/Hostname</source> <translation>Address/Hostname</translation> </message> <message> <source>Version</source> <translation>Version</translation> </message> <message> <source>Ping Time</source> <translation>Ping Time</translation> </message> </context> <context> <name>PrivacyWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Title</source> <translation>Başlık</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>No active Master Node yet</source> <translation>Henüz aktif Masternode bulunmuyor</translation> </message> <message> <source>Privacy</source> <translation>Gizlilik</translation> </message> <message> <source>Convert</source> <translation>Dönüştür</translation> </message> <message> <source>Mint</source> <translation>Mint</translation> </message> <message> <source>Minting zMXT anonymizes your MXT by removing any transaction history, making transactions untraceable </source> <translation>zMXT mintleme işlemi PIV'lerinizin işlem geçmişini siler, işlemleri takip edilemez yapar</translation> </message> <message> <source>Mint new zMXT or convert back to PIV</source> <translation>Yeni zMXT mintle veya PIV'e geri 
dönüştür</translation> </message> <message> <source>Last Zerocoin Movements</source> <translation>Son Zerocoin Hareketleri</translation> </message> <message> <source>No transactions yet</source> <translation>İşlem bulunmuyor</translation> </message> <message> <source>Mint zMXT</source> <translation>Mint ZMXT</translation> </message> <message> <source>Enter amount of MXT to mint into zMXT</source> <translation>zMXT'e mintlenmek için dönüştürülecek MXT miktarını gir</translation> </message> <message> <source>Convert back to PIV</source> <translation>PIV'e geri dönüştür</translation> </message> <message> <source>Enter amount of zMXT to convert back into PIV</source> <translation>PIV'e geri dönüştürülecek zMXT miktarını gir</translation> </message> <message> <source>Zerocoin</source> <translation>Zerocoin</translation> </message> <message> <source>zMXT is currently undergoing maintenance</source> <translation>zMXT şu anda bakımdadır</translation> </message> <message> <source>You need to unlock the wallet to be able to %1 zMXT</source> <translation>%1 zMXT yapabilmek için cüzdanınızın kilidini kaldırmalısınız</translation> </message> <message> <source>convert</source> <translation>dönüştür</translation> </message> <message> <source>mint</source> <translation>mint</translation> </message> <message> <source>Invalid value</source> <translation>Geçersiz değer</translation> </message> <message> <source>zMXT minted successfully</source> <translation>zMXT minting başarılı</translation> </message> <message> <source>zMXT converted back to PIV</source> <translation>zMXT PIV'e dönüştürüldü</translation> </message> <message> <source>You don't have any MXT to select.</source> <translation>Seçebileceğiniz PIV'iniz bulunmuyor</translation> </message> <message> <source>Rescan Mints</source> <translation>Mint'leri tekrar tara</translation> </message> <message> <source>Your zerocoin mints are going to be scanned from the blockchain from scratch</source> <translation>Zerocoin mintleriniz sıfırdan blockchain üzerinde taranacaktır</translation> </message> <message> <source>Reset Spent Zerocoins</source> <translation>Zerocoin harcamalarını resetle</translation> </message> <message> <source>Your zerocoin spends are going to be scanned from the blockchain from scratch</source> <translation>Zerocoin mintleriniz sıfırdan blockchain üzerinde taranacaktır</translation> </message> <message> <source>Total %1</source> <translation>Toplam %1</translation> </message> </context> <context> <name>ProposalFrame</name> <message> <source>Open proposal page in browser</source> <translation>Teklif sayfasını tarayıcıda açın</translation> </message> <message> <source> remaining payment(s).</source> <translation>kalan ödeme(ler).</translation> </message> <message> <source>Yes:</source> <translation>Evet:</translation> </message> <message> <source>Abstain:</source> <translation>Çekimser:</translation> </message> <message> <source>No:</source> <translation>Hayır:</translation> </message> <message> <source>A proposal URL can be used for phishing, scams and computer viruses. Open this link only if you trust the following URL. </source> <translation>Bir teklif URL'si şifre avcılığı, dolandırıcılık, bilgisayar virüsü amaçlarıyla kullanılabilir. 
Linki sadece URL'ye güveniyorsanız açın.</translation> </message> <message> <source>Open link</source> <translation>Link'i aç</translation> </message> <message> <source>Copy link</source> <translation>Link'i kopyala</translation> </message> <message> <source>Wallet Locked</source> <translation>Cüzdan Kilitlendi</translation> </message> <message> <source>You must unlock your wallet to vote.</source> <translation>Oy verebilmek için cüzdanın kilitini açmalısınız.</translation> </message> <message> <source>Do you want to vote %1 on</source> <translation>%1 olarak onaylamak istiyor musunuz?</translation> </message> <message> <source>using all your masternodes?</source> <translation>tüm Masternode'larınızı kullanarak?</translation> </message> <message> <source>Proposal Hash:</source> <translation>Teklif Hash:</translation> </message> <message> <source>Proposal URL:</source> <translation>Teklif URL:</translation> </message> <message> <source>Confirm Vote</source> <translation>Oyu Onayla</translation> </message> <message> <source>Vote Results</source> <translation>Oy Sonuçları</translation> </message> </context> <context> <name>QObject</name> <message> <source>Amount</source> <translation>Miktar</translation> </message> <message> <source>Enter a PIVX address (e.g. %1)</source> <translation>Gir bir PIVX addres (e.g. %1)</translation> </message> <message> <source>%1 d</source> <translation>%1 d</translation> </message> <message> <source>%1 h</source> <translation>%1 h</translation> </message> <message> <source>%1 m</source> <translation>%1 m</translation> </message> <message> <source>%1 s</source> <translation>%1 s</translation> </message> <message> <source>NETWORK</source> <translation>AĞ</translation> </message> <message> <source>BLOOM</source> <translation>BLOOM</translation> </message> <message> <source>ZK_BLOOM</source> <translation>ZK_BLOOM</translation> </message> <message> <source>UNKNOWN</source> <translation>BİLİNMEYEN</translation> </message> <message> <source>None</source> <translation>Hiçbiri</translation> </message> <message> <source>N/A</source> <translation>N/A</translation> </message> <message> <source>%1 ms</source> <translation>%1 ms</translation> </message> <message> <source>PIVX Core</source> <translation>PIVX CORE</translation> </message> <message> <source>Error: Specified data directory "%1" does not exist.</source> <translation>Hata: Belirtilen "%1" dizin bulunmamaktadır.</translation> </message> <message> <source>Error: Cannot parse configuration file: %1. Only use key=value syntax.</source> <translation>Hata: Konfigürasyon dosyası %1 çözümlenemiyor. 
Sadece anahtar=değer sözdizimini kullanın.</translation> </message> <message> <source>Error: Invalid combination of -regtest and -testnet.</source> <translation>Hata: Geçersiz -regtest ve -testnet kombinasyonu.</translation> </message> <message> <source>Error reading masternode configuration file: %1</source> <translation>Masternode konfigürasyon dosyası okuma hatası: %1</translation> </message> <message> <source>PIVX Core didn't yet exit safely...</source><|fim▁hole|> <translation>PIVX Core henüz güvenle kapanmadı..</translation> </message> </context> <context> <name>RPCConsole</name> <message> <source>Tools window</source> <translation>Araçlar penceresi</translation> </message> <message> <source>&amp;Information</source> <translation>bilgi</translation> </message> <message> <source>General</source> <translation>Genel</translation> </message> <message> <source>Name</source> <translation>isim</translation> </message> <message> <source>Client name</source> <translation>Uygulama adı</translation> </message> <message> <source>N/A</source> <translation>N/A</translation> </message> <message> <source>Number of connections</source> <translation>bağlantı sayısı</translation> </message> <message> <source>&amp;Open</source> <translation>&amp;Açık</translation> </message> <message> <source>Startup time</source> <translation>Başlatma zamanı</translation> </message> <message> <source>Network</source> <translation>Ağ</translation> </message> <message> <source>Last block time</source> <translation>Son Blok Zamanı</translation> </message> <message> <source>Debug log file</source> <translation>Hata ayıklama dosyası</translation> </message> <message> <source>Build date</source> <translation>Yapım tarihi</translation> </message> <message> <source>Current number of blocks</source> <translation>Mevcut Blok Sayısı</translation> </message> <message> <source>Client version</source> <translation>Uygulama versionu</translation> </message> <message> <source>Using BerkeleyDB version</source> <translation>BerkeleyDB version</translation> </message> <message> <source>Block chain</source> <translation>Block chain</translation> </message> <message> <source>Open the PIVX debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Geçerli veri dizininden PIVX hata ayıklama günlük dosyasını açın. 
Bu, büyük günlük dosyaları için birkaç saniye sürebilir.</translation> </message> <message> <source>Number of Masternodes</source> <translation>Masternodların sayısı</translation> </message> <message> <source>Data Directory</source> <translation>Bilgi Dizini</translation> </message> <message> <source>Last block hash</source> <translation>Son blok hash</translation> </message> <message> <source>&amp;Console</source> <translation>&amp;Konsol</translation> </message> <message> <source>Clear console</source> <translation>Temizle konsolu</translation> </message> <message> <source>&amp;Network Traffic</source> <translation>&amp;Ağ Trafiği</translation> </message> <message> <source>&amp;Clear</source> <translation>&amp;Temizle</translation> </message> <message> <source>Totals</source> <translation>Totals</translation> </message> <message> <source>Received</source> <translation>Alındı</translation> </message> <message> <source>Sent</source> <translation>Gitti</translation> </message> <message> <source>&amp;Peers</source> <translation>&amp;Peers</translation> </message> <message> <source>Banned peers</source> <translation>Engellenmiş taraflar</translation> </message> <message> <source>Select a peer to view detailed information.</source> <translation>Ayrıntılı bilgi için peer seç.</translation> </message> <message> <source>Whitelisted</source> <translation>Güvenli Adres Listesi</translation> </message> <message> <source>Direction</source> <translation>Yön</translation> </message> <message> <source>Protocol</source> <translation>Protokol</translation> </message> <message> <source>Version</source> <translation>Version</translation> </message> <message> <source>Services</source> <translation>Servisler</translation> </message> <message> <source>Ban Score</source> <translation>Ban Score</translation> </message> <message> <source>Connection Time</source> <translation>bağlantı zamanı</translation> </message> <message> <source>Last Send</source> <translation>son gönderi</translation> </message> <message> <source>Last Receive</source> <translation>son alınan</translation> </message> <message> <source>Bytes Sent</source> <translation>Gönderilen Byte</translation> </message> <message> <source>Bytes Received</source> <translation>Alınan Byte</translation> </message> <message> <source>Ping Time</source> <translation>Ping Time</translation> </message> <message> <source>&amp;Wallet Repair</source> <translation>&amp;Cüzdan Tamiri</translation> </message> <message> <source>Delete local Blockchain Folders</source> <translation>Yerel Blockchain Klasörlerini Sil</translation> </message> <message> <source>Wallet In Use:</source> <translation>Kullanımdaki Cüzdan:</translation> </message> <message> <source>Starting Block</source> <translation>Başlangıç bloğu</translation> </message> <message> <source>Synced Headers</source> <translation>Senkronize Edilen Başlıklar</translation> </message> <message> <source>Synced Blocks</source> <translation>Senkronize Edilmiş Bloklar</translation> </message> <message> <source>The duration of a currently outstanding ping.</source> <translation>Mevcut tamamlanmamış ping süresi</translation> </message> <message> <source>Ping Wait</source> <translation>Ping Bekleme</translation> </message> <message> <source>Time Offset</source> <translation>Zaman Farkı</translation> </message> <message> <source>Custom Backup Path:</source> <translation>Özel Yedekleme Yolu:</translation> </message> <message> <source>Custom zMXT Backup Path:</source> <translation>Özel zMXT Yedekleme Yolu:</translation> 
</message> <message> <source>Custom Backups Threshold:</source> <translation>Belirli Yedek Eşiği: </translation> </message> <message> <source>Salvage wallet</source> <translation>Kurtarma cüzdanı</translation> </message> <message> <source>Attempt to recover private keys from a corrupt wallet.dat.</source> <translation>Bozuk bir wallet.dat'tan özel anahtarları kurtarmayı deneyin.</translation> </message> <message> <source>Rescan blockchain files</source> <translation>Blockchain dosyalarını tekrar yükleyin</translation> </message> <message> <source>Rescan the block chain for missing wallet transactions.</source> <translation>Eksik cüzdan işlemleri için blockchaini yeniden tarayın</translation> </message> <message> <source>Recover transactions 1</source> <translation>İşemleri kurtarın 1</translation> </message> <message> <source>Recover transactions from blockchain (keep meta-data, e.g. account owner).</source> <translation>Blockchainden işlemleri kurtarın ( meta veri ve hesap sahibi gibi koru)</translation> </message> <message> <source>Recover transactions 2</source> <translation>İşlemleri kurtar 2</translation> </message> <message> <source>Recover transactions from blockchain (drop meta-data).</source> <translation>İşlemleri blok zincirini kullanarak kurtarın (meta verileri bırakın).</translation> </message> <message> <source>Upgrade wallet format</source> <translation>Cüzdan formatını güncelleyin</translation> </message> <message> <source>Rebuild block chain index from current blk000??.dat files.</source> <translation>Blok zinciri dizinini mevcut blk000 ?? dat dosyalarından yeniden oluştur.</translation> </message> <message> <source>-resync:</source> <translation>-yeniden güncelle:</translation> </message> <message> <source>Deletes all local blockchain folders so the wallet synchronizes from scratch.</source> <translation>Cüzdan sıfırdan eşleştirmek için tüm yerel blok zincir klasörlerini siler.</translation> </message> <message> <source>The buttons below will restart the wallet with command-line options to repair the wallet, fix issues with corrupt blockhain files or missing/obsolete transactions.</source> <translation>Aşağıdaki düğmeler cüzdanı onarmak, bozuk blokaj dosyalarıyla ilgili sorunları çözmek veya eksik / eskimiş işlemler için komut satırı seçenekleriyle cüzdan yeniden başlatacaktır.</translation> </message> <message> <source>Wallet repair options.</source> <translation>Cüzdan onarım seçenekleri.</translation> </message> <message> <source>Upgrade wallet to latest format on startup. (Note: this is NOT an update of the wallet itself!)</source> <translation>Cüzdanı başlangıçta en yeni formata yükseltin. 
(Not: Bu, cüzdanın kendisinin güncellemesi değildir!)</translation> </message> <message> <source>Rebuild index</source> <translation>Dizini yeniden oluştur</translation> </message> <message> <source>In:</source> <translation>İçe:</translation> </message> <message> <source>Out:</source> <translation>Dışa:</translation> </message> <message> <source>Welcome to the PIVX RPC console.</source> <translation>PIVX RPC konsola hoşgeldiniz</translation> </message> <message> <source>&amp;Disconnect Node</source> <translation>&amp;Düğümü Kopar</translation> </message> <message> <source>Ban Node for</source> <translation>Düğümü egelle</translation> </message> <message> <source>1 &amp;hour</source> <translation>1 &amp;saat</translation> </message> <message> <source>1 &amp;day</source> <translation>1 &amp;gün</translation> </message> <message> <source>1 &amp;week</source> <translation>1 &amp;hafta</translation> </message> <message> <source>1 &amp;year</source> <translation>1 &amp;yıl</translation> </message> <message> <source>&amp;Unban Node</source> <translation>&amp;Düğüm engelini kaldır</translation> </message> <message> <source>This will delete your local blockchain folders and the wallet will synchronize the complete Blockchain from scratch.&lt;br /&gt;&lt;br /&gt;</source> <translation>Bu, yerel blok zincir klasörlerini silecek ve cüzdan tüm Blockchain'i sıfırdan senkronize edecektir..&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source>This needs quite some time and downloads a lot of data.&lt;br /&gt;&lt;br /&gt;</source> <translation>Bu biraz zaman alacak ve birçok veri indirecektir.&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source>Your transactions and funds will be visible again after the download has completed.&lt;br /&gt;&lt;br /&gt;</source> <translation>İşlemleriniz ve bakiyeniz indirme tamamıyle tamamlandıktan sonra tekrar görünür olacaktır.&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source>Do you want to continue?.&lt;br /&gt;</source> <translation>Devam etmek istiyor musunuz?.&lt;br /&gt;</translation> </message> <message> <source>Confirm resync Blockchain</source> <translation>Blockchaini resenkronize etmeyi onaylayın</translation> </message> <message> <source>Use up and down arrows to navigate history, and %1 to clear screen.</source> <translation>Aşağı yukarı tuşlarıyla tarihçede gezebilirsiniz, ekranı temizlemek için %1</translation> </message> <message> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Yazın&lt;b&gt;help&lt;/b&gt; mevcut komutaları görmek için</translation> </message> <message> <source>WARNING: Scammers have been active, telling users to type commands here, stealing their wallet contents. Do not use this console without fully understanding the ramifications of a command.</source> <translation>DİKKAT: Dolandırıcılar aktif çalışıyor, kullanıcılardan buraya komut girmelerini istiyor ve cüzdanlarını çalıyorlar. 
Bu konsolu kullanacağınız komutun etkilerini tam olarak anlamadan kullanmayın.</translation> </message> <message> <source>%1 B</source> <translation>%1 B</translation> </message> <message> <source>%1 KB</source> <translation>%1 KB</translation> </message> <message> <source>%1 MB</source> <translation>%1 MB</translation> </message> <message> <source>%1 GB</source> <translation>%1 GB</translation> </message> <message> <source>(node id: %1)</source> <translation>(node id: %1)</translation> </message> <message> <source>via %1</source> <translation>ile %1</translation> </message> <message> <source>never</source> <translation>asla</translation> </message> <message> <source>Inbound</source> <translation>Gelen</translation> </message> <message> <source>Outbound</source> <translation>Giden</translation> </message> <message> <source>Yes</source> <translation>Evet</translation> </message> <message> <source>No</source> <translation>Hayır</translation> </message> <message> <source>Unknown</source> <translation>Bilinmeyen</translation> </message> </context> <context> <name>ReceiveDialog</name> <message> <source>Dialog</source> <translation>Diyalog</translation> </message> <message> <source>Transaction Details</source> <translation>İşlem Detayları</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>D7VFR83SQbiezrW72hjcWJtcfip5krte2Z </source> <translation>D7VFR83SQbiezrW72hjcWJtcfip5krte2Z </translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> </context> <context> <name>ReceiveWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>No address</source> <translation>Adres bulunmuyor</translation> </message> <message> <source>Add Label</source> <translation>Etiket ekle</translation> </message> <message> <source>Generate Address</source> <translation>Adres Oluştur</translation> </message> <message> <source>Copy</source> <translation>Kopyala</translation> </message> <message> <source>Receive</source> <translation>Al</translation> </message> <message> <source>Scan the QR code or copy the address to receive PIV.</source> <translation>PIVX almak için QR kodunu okutun veya adresi kopyalayın</translation> </message> <message> <source>No address </source> <translation>Adres bulunmuyor</translation> </message> <message> <source>No available address, try unlocking the wallet</source> <translation>Adres bulunmuyor, cüzdan kilidini açarak deneyin</translation> </message> <message> <source>Error generating address</source> <translation>Adres oluşturma hatası</translation> </message> <message> <source>Change Label</source> <translation>Etiketi Değiştirin</translation> </message> <message> <source>Edit Address Label</source> <translation>Adres Etiketini güncelleyin</translation> </message> <message> <source>Address label saved</source> <translation>Adres etiketi kaydedildi</translation> </message> <message> <source>Error storing address label</source> <translation>Adres etiketini kaydederken hata oluştu</translation> </message> <message> <source>New address created</source> <translation>Yeni adres oluşturuldu</translation> </message> <message> <source>Address copied</source> <translation>Adres kopyalandı</translation> </message> <message> <source>URI copied to clipboard</source> 
<translation>URI panoya kopyalandı</translation> </message> <message> <source>Address copied to clipboard</source> <translation>Adres panoya kopyalandı</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <source>Date</source> <translation>Tarih</translation> </message> <message> <source>Label</source> <translation>Etiket</translation> </message> <message> <source>Message</source> <translation>Mesaj</translation> </message> <message> <source>Address</source> <translation>Adres</translation> </message> <message> <source>Amount</source> <translation>Miktar</translation> </message> <message> <source>(no label)</source> <translation>(etiket yok)</translation> </message> <message> <source>(no message)</source> <translation>(mesaj yok)</translation> </message> <message> <source>(no amount)</source> <translation>(miktar yok)</translation> </message> </context> <context> <name>RequestDialog</name> <message> <source>Dialog</source> <translation>Diyalog</translation> </message> <message> <source>New Request Payment</source> <translation>Yeni Ödeme Talebi</translation> </message> <message> <source>Lorem ipsum dolor sit amet, consectur cling elit, sed do </source> <translation>Lorem ipsum dolor sit amet, consectur cling elit, sed do </translation> </message> <message> <source>Amount</source> <translation>Miktar</translation> </message> <message> <source>PIV</source> <translation>PIV</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>D7VFR83SQbiezrW72hjcWJtcfip5krte2Z </source> <translation>D7VFR83SQbiezrW72hjcWJtcfip5krte2Z </translation> </message> <message> <source>COPY ADDRESS</source> <translation>ADRES KOPYALA</translation> </message> <message> <source>COPY URL</source> <translation>URL KOPYALA</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>REQUEST</source> <translation>TALEP ET</translation> </message> <message> <source>Instead of only sharing a PIVX address, you can create a Payment Request message which bundles up more information than is contained in just a PIVX address.</source> <translation>PIVX adresi paylaşmak yerine, PIVX adresinden daha fazla bilgi içeren bir Ödeme Talebi oluşturabilirsin.</translation> </message> <message> <source>Label</source> <translation>Etiket</translation> </message> <message> <source>Enter a label to be saved within the address</source> <translation>Adresin içine kaydedilecek bir etiket belirle</translation> </message> <message> <source>Description (optional)</source> <translation>Açıklama (opsiyonel)</translation> </message> <message> <source>Add description </source> <translation>Açıklama ekle</translation> </message> <message> <source>Error</source> <translation>Hata</translation> </message> <message> <source>GENERATE</source> <translation>OLUŞTUR</translation> </message> <message> <source>Creates an address to receive coin delegations and be able to stake them.</source> <translation>Koin delegasyonlarını almak ve stake edebilmek için adres yaratır.</translation> </message> <message> <source>New Cold Staking Address</source> <translation>Yeni Stakeleme Adresi</translation> </message> </context> <context> <name>SendChangeAddressDialog</name> <message> <source>Dialog</source> <translation>Diyalog</translation> </message> <message> <source>Transaction Details</source> <translation>İşlem Detayları</translation> </message> <message> <source>PushButton</source> 
<translation>Butona Basın</translation> </message> <message> <source>Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim.</source> <translation>Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim.</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>Custom Change Address</source> <translation>Özel fark adresi</translation> </message> <message> <source>The remainder of the value resultant from the inputs minus the outputs value goes to the "change" PIVX address</source> <translation>Giriş ve çıkış değerleri arasındaki kalan tutar "fark" MXT adresine gider</translation> </message> </context> <context> <name>SendCustomFeeDialog</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Transaction Details</source> <translation>İşlem Detayları</translation> </message> <message> <source>Customize the transaction fee at your to your liking, depending on the fee value your transaction will be included or not in the blockchain.</source> <translation>İsteğiniz gibi transfer masrafını değiştirebilirsiniz, belirlediğiniz değere göre blockchaine yazılır veya yazılmaz. </translation> </message> <message> <source>Recomended</source> <translation>Tavsiye edilir</translation> </message> <message> <source>0.00 KB</source> <translation>0.00 KB</translation> </message> <message> <source>Custom</source> <translation>Özel</translation> </message> <message> <source>Per kilobyte</source> <translation>Kilobyte başına</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>Customize Fee</source> <translation>Masrafı değiştir</translation> </message> <message> <source>Customize the transaction fee, depending on the fee value your transaction will be included or not in the blockchain.</source> <translation>İsteğiniz gibi transfer masrafını değiştirebilirsiniz, belirlediğiniz değere göre blockchaine yazılır veya yazılmaz. 
</translation> </message> <message> <source>Normal</source> <translation>Normal</translation> </message> <message> <source>Slow</source> <translation>Yavaş</translation> </message> <message> <source>Fast</source> <translation>Hızlı</translation> </message> <message> <source>SAVE</source> <translation>KAYDET</translation> </message> </context> <context> <name>SendMultiRow</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>Add address</source> <translation>Adres ekle</translation> </message> <message> <source>Add description</source> <translation>Açıklama ekle</translation> </message> </context> <context> <name>SendWidget</name> <message> <source>Send</source> <translation>Gönder</translation> </message> <message> <source>You can transfer public coins (PIV) or private coins (zMXT)</source> <translation>Açık koin MXT veya gizli koin zMXT transfer edebilirsiniz</translation> </message> <message> <source>Select coin type to spend</source> <translation>Harcayacağınız koin tipini seçin</translation> </message> <message> <source>Enter a PIVX address or contact label</source> <translation>Bir PIVX adresi veya bağlantı etiketi girin</translation> </message> <message> <source>Amount</source> <translation>Miktar</translation> </message> <message> <source>Customize fee</source> <translation>Masrafı değiştir</translation> </message> <message> <source>Clear all</source> <translation>Hepsini temizle</translation> </message> <message> <source>Add recipient</source> <translation>Alıcı Ekle</translation> </message> <message> <source>Reset to default</source> <translation>Varsayılana konumlandır</translation> </message> <message> <source>Total to send</source> <translation>Gönderilecek toplam</translation> </message> <message> <source>Send PIV</source> <translation>PIV Gönder</translation> </message> <message> <source>Send zMXT</source> <translation>zMXT gönder</translation> </message> <message> <source>Total Remaining (Selected UTXO)</source> <translation>Seçili UTXO'dan kalan toplam</translation> </message> <message> <source>Total Remaining</source> <translation>Toplam kalan</translation> </message> <message> <source>Customize Fee</source> <translation>Masrafı değiştir</translation> </message> <message> <source>Maximum amount of outputs reached</source> <translation>Maksimum çıktı miktarına ulaşıldı</translation> </message> <message> <source>Invalid entry, previous entries must be valid before add a new one</source> <translation>Geçersiz giriş, yeni giriş yapabilmek için önceki girişlerin geçerli olması gerekmektedir.</translation> </message> <message> <source>Invalid entry</source> <translation>Geçersiz giriş</translation> </message> <message> <source>No set recipients</source> <translation>Alıcı belirlenmedi</translation> </message> <message> <source>Cannot send, wallet locked</source> <translation>Gönderilemez, cüzdan kilitli</translation> </message> <message> <source>Cannot create transaction.</source> <translation>İşlem yaratılamıyor.</translation> </message> <message> <source>WARNING: Transaction spends a cold-stake delegation, voiding it. These coins will no longer be cold-staked.</source> <translation>UYARI:: Transfer soğuk stakleme delegasyonunu harcıyor, onu boşaltıyor. 
Bu koinler artık soğuk stakelenmeyecektir.</translation> </message> <message> <source>Transaction sent</source> <translation>Ödeme gönderildi</translation> </message> <message> <source>Spend Zerocoin</source> <translation>Harca Zerocoin</translation> </message> <message> <source>zMXT is currently undergoing maintenance.</source> <translation>zMXT halen bakım alıyor.</translation> </message> <message> <source>Coin control mint not found</source> <translation>Koin kontrolü min bulunamadı</translation> </message> <message> <source>Sending %1 to address %2 </source> <translation>%1 adresinden %2 adresine gönderiliyor </translation> </message> <message> <source>Sending %1 to addresses: %2</source> <translation>%1 adresinden %2 adreslerine gönderiliyor</translation> </message> <message> <source>zMXT transaction sent!</source> <translation>zMXT ödemesi gönderildi</translation> </message> <message> <source>Version 1 zMXT require a security level of 100 to successfully spend.</source> <translation>Versiyon 1 olan zMXT'lerin başarıyla harcanabilmesi için güvenlik seviyesinin 100 olması gerekiyor.</translation> </message> <message> <source>Too much inputs (</source> <translation>Çok fazla girdi (</translation> </message> <message> <source>) needed. Maximum allowed: </source> <translation>) gerekli. Maksimum bırakılan: </translation> </message> <message> <source> Either mint higher denominations (so fewer inputs are needed) or reduce the amount to spend.</source> <translation> Ya daha yüksek mezhepleri daraltın (daha az girdi gereklidir) veya harcama miktarını azaltın.</translation> </message> <message> <source>Address label update failed for address: %1</source> <translation>%1 adresi için adres etiketi güncellemesi başarısız oldu</translation> </message> <message> <source>Invalid change address</source> <translation>Geçersiz fark adresi</translation> </message> <message> <source>Invalid URI</source> <translation>Geçersiz URI</translation> </message> <message> <source>Invalid address in URI</source> <translation>URI içinde geçersiz adres</translation> </message> <message> <source>Custom Fee %1</source> <translation>Belirlenmiş masraf %1</translation> </message> <message> <source>You don't have any MXT to select.</source> <translation>Seçebileceğiniz PIV'iniz bulunmuyor</translation> </message> <message> <source>You don't have any zMXT in your balance to select.</source> <translation>Bakiyenizde seçebileceğiniz zMXT'iniz bulunmuyor</translation> </message> <message> <source>No contacts available, you can go to the contacts screen and add some there!</source> <translation>Uygun adres bulunmuyor, AL ekranına giderek adres ekleyebilirsin.</translation> </message> <message> <source>Save contact</source> <translation>Bağlantıyı kaydet</translation> </message> <message> <source>Address field is empty</source> <translation>Adres alanı boş</translation> </message> <message> <source>Invalid address</source> <translation>Geçersiz adres</translation> </message> <message> <source>Cannot store your own address as contact</source> <translation>Kendi adresinizi bağlantı adresi olarak kaydedemezsiniz.</translation> </message> <message> <source>Update Contact</source> <translation>Bağlantıyı güncelle</translation> </message> <message> <source>Create New Contact</source> <translation>Yeni Bağlantı Ekle</translation> </message> <message> <source>New Contact Stored</source> <translation>Yeni Bağlantı Kaydedildi</translation> </message> <message> <source>Error Storing Contact</source> <translation>Bağlantı Kayıt 
Hatası</translation> </message> </context> <context> <name>SettingsBackupWallet</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Backup Wallet </source> <translation>Cüzdanı Yedekle</translation> </message> <message> <source>Change Wallet Passphrase</source> <translation>Cüzdan Parolasını Değiştir</translation> </message> <message> <source>Keep your wallet safe doing regular backups, store your backup file externally. This option creates a wallet.dat file that can be used to recover your whole balance (transactions and addresses) from another device.</source> <translation>Düzenli yedekler alarak cüzdanınızı güvende tutun, yedeklerinizi dışarıda saklayın. Bu opsiyon tüm bakiyenizi (işlemleri ve adresleri) kurtarmak için kullanacağınız yeni bir wallet.dat dosyası yaratır .</translation> </message> <message> <source>This will decrypt the whole wallet data and encrypt it back with the new passphrase. Remember to write it down and store it safely, otherwise you might lose access to your funds.</source> <translation>Bu işlem tüm bilginin şifresini çözecek ve yeni parola ile şifreleyecektir. Paranıza erişiminizi kaybetmemek için yazmayı ve güvenli bir yerde saklamayı unutmayın.</translation> </message> <message> <source>Where</source> <translation>Yer</translation> </message> <message> <source>Set a folder location</source> <translation>Bir dosya klasör yeri belirleyin</translation> </message> <message> <source>Backup</source> <translation>Yedek</translation> </message> <message> <source>Change Passphrase</source> <translation>Parolayı Değiştir</translation> </message> <message> <source>Backup Wallet</source> <translation>Cüzdanı Yedekle</translation> </message> <message> <source>Wallet Data (*.dat)</source> <translation>Wallet Data (*.dat)</translation> </message> <message> <source>Backup created</source> <translation>Yedek oluşturuldu</translation> </message> <message> <source>Backup creation failed</source> <translation>Yedek oluşturma başarısız</translation> </message> <message> <source>Please select a folder to export the backup first.</source> <translation>Yedeğinizi aktarmak için önce bir klasör belirleyin</translation> </message> </context> <context> <name>SettingsBitToolWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Decrypt Address Result</source> <translation>Adres Şifre Çözüm Sonucu</translation> </message> <message> <source>Import Address</source> <translation>Adres içe aktar</translation> </message> <message> <source>Clear</source> <translation>temizle</translation> </message> <message> <source>BIP38 Tool</source> <translation>BIP38 Aracı</translation> </message> <message> <source>Encrypt</source> <translation>Şifrele</translation> </message> <message> <source>Decrypt</source> <translation>Şifreyi Çöz</translation> </message> <message> <source>Encrypted key</source> <translation>Şifrelenmiş anahtar</translation> </message> <message> <source>Enter a encrypted key</source> <translation>Bir şifrelenmiş anahtar gir</translation> </message> <message> <source>Passphrase</source> <translation>Parola</translation> </message> <message> 
<source>Enter a passphrase </source> <translation>Bir Parola gir</translation> </message> <message> <source>Decrypted address result</source> <translation>Adres Şifre Çözüm Sonucu</translation> </message> <message> <source>Decrypted Address</source> <translation>Şifresi Çözülmüş Adres</translation> </message> <message> <source>DECRYPT KEY</source> <translation>ŞİFRE ÇÖZME ANAHTARI</translation> </message> <message> <source>Enter a PIVX address</source> <translation>Bir PIVX adresi girin</translation> </message> <message> <source>Add address</source> <translation>Adres ekle</translation> </message> <message> <source>Write a message</source> <translation>Bir mesaj yaz</translation> </message> <message> <source>Encrypted Key</source> <translation>Şifrelenmiş Anahtar</translation> </message> <message> <source>ENCRYPT</source> <translation>ŞİFRELE</translation> </message> <message> <source>CLEAR ALL</source> <translation>TEMİZLE</translation> </message> <message> <source>CLEAR</source> <translation>TEMİZLE</translation> </message> <message> <source>The entered passphrase is invalid. </source> <translation>Girilen şifre geçersiz.</translation> </message> <message> <source>Allowed: 0-9,a-z,A-Z,</source> <translation>İzinli: 0-9,a-z,A-Z,</translation> </message> <message> <source>The entered address is invalid.</source> <translation>Girilen adres geçersiz.</translation> </message> <message> <source>Please check the address and try again.</source> <translation>Lütfen adresini kontrol edin ve tekrar deneyin.</translation> </message> <message> <source>The entered address does not refer to a key.</source> <translation>Girilen adres bir tuşa atıfta bulunmaz.</translation> </message> <message> <source>Wallet unlock was cancelled.</source> <translation>Cüzdan kilidi açılması iptal edildi.</translation> </message> <message> <source>Private key for the entered address is not available.</source> <translation>Girilen adres için özel anahtar kullanılamıyor.</translation> </message> <message> <source>Address encrypted.</source> <translation>Adres şifrelendi.</translation> </message> <message> <source>No addresses available, you can go to the receive screen and add some there!</source> <translation>Uygun adres bulunmuyor, AL ekranına giderek adres ekleyebilirsin.</translation> </message> <message> <source>Failed to decrypt.</source> <translation>Şifre çözme başarısız oldu.</translation> </message> <message> <source>Please check the key and passphrase and try again.</source> <translation>Lütfen anahtarı ve parolayı kontrol edin ve tekrar deneyin</translation> </message> <message> <source>Data Not Valid.</source> <translation>Veriler Geçersiz.</translation> </message> <message> <source>Please try again.</source> <translation>Lütfen tekrar deneyin.</translation> </message> <message> <source>Please wait while key is imported</source> <translation>Anahtar içe aktarılırken lütfen bekleyin</translation> </message> <message> <source>Cannot import address, key already held by the wallet</source> <translation>Adres içe aktarılamıyor, anahtar hala cüzdanın içinde</translation> </message> <message> <source>Error adding key to the wallet</source> <translation>Cüzdana anahtar ekleme hatası</translation> </message> <message> <source>Successfully added pivate key to the wallet</source> <translation>Özel cüzdana anahtar başarıyla eklendi</translation> </message> </context> <context> <name>SettingsConsoleWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> 
<translation>TextLabel</translation> </message> <message> <source>Console</source> <translation>Konsol</translation> </message> <message> <source>Console input</source> <translation>Konsol Girişi</translation> </message> <message> <source>Command Line Options </source> <translation>Komut Satırı Seçenekleri</translation> </message> <message> <source>Open Debug File</source> <translation>Hata Ayıklama Dosyasını Aç</translation> </message> <message> <source>Cannot open debug file. Verify that you have installed a predetermined text editor.</source> <translation>Hata ayıklama dosyası açılamadı. Önceden belirlenmiş bir text editleme programınız olup olmadığını kontrol ediniz.</translation> </message> <message> <source>Welcome to the PIVX RPC console.</source> <translation>PIVX RPC konsola hoşgeldiniz</translation> </message> <message> <source>Use up and down arrows to navigate history, and %1 to clear screen.</source> <translation>Aşağı yukarı tuşlarıyla tarihçede gezebilirsiniz, ekranı temizlemek için %1</translation> </message> <message> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Yazın&lt;b&gt;help&lt;/b&gt; mevcut komutaları görmek için</translation> </message> <message> <source>WARNING: Scammers have been active, telling users to type commands here, stealing their wallet contents. Do not use this console without fully understanding the ramifications of a command.</source> <translation>DİKKAT: Dolandırıcılar aktif çalışıyor, kullanıcılardan buraya komut girmelerini istiyor ve cüzdanlarını çalıyorlar. Bu konsolu kullanacağınız komutun etkilerini tam olarak anlamadan kullanmayın.</translation> </message> </context> <context> <name>SettingsDisplayOptionsWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Reset to default</source> <translation>Varsayılana konumlandır</translation> </message> <message> <source>Discard changes</source> <translation>Değişikliklerden vazgeç</translation> </message> <message> <source>Display</source> <translation>Görüntü</translation> </message> <message> <source>Customize the display view options</source> <translation>Görünüm ve seçeneklerini belirleyin</translation> </message> <message> <source>Language</source> <translation>Dil</translation> </message> <message> <source>Unit to show amount</source> <translation>Tutarların gösterileceği birim:</translation> </message> <message> <source>Decimal digits</source> <translation>Ondalık basamak</translation> </message> <message> <source>Third party transactions URLs</source> <translation>Üçüncü taraf işlem URL'si</translation> </message> <message> <source>Hide empty balances</source> <translation>Boş bakiyeleri sakla</translation> </message> <message> <source>SAVE</source> <translation>KAYDET</translation> </message> <message> <source>default</source> <translation>halihazırda</translation> </message> <message> <source>Options reset succeed</source> <translation>Opsiyonları sıfırlama başarılı</translation> </message> </context> <context> <name>SettingsFaqWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>1) What is 
PIVX?</source> <translation>1)PIVX nedir?</translation> </message> <message> <source>2) Why are my MXT unspendable?</source> <translation>2) PIV'lerim neden harcanabilir değil?</translation> </message> <message> <source>3) PIVX privacy? What is Zerocoin (zMXT)?</source> <translation>3)PIVX gizliliği? Zerocoin (zMXT) nedir?</translation> </message> <message> <source>4) Why are my zMXT unspendable?</source> <translation>4) zMXT'lerim neden harcanabilir değil?</translation> </message> <message> <source>5) Why did my wallet convert the balance into zMXT automatically?</source> <translation>5)Neden cüzdanım bakiyemi otomatik olarak zMXT'e dönüştürdü?</translation> </message> <message> <source>6) How do I receive PIV/zMXT?</source> <translation>6) Nasıl PIV/zMXT alabilirim?</translation> </message> <message> <source>7) How do I stake PIV/zMXT?</source> <translation>7) Nasıl PIV/zMXT staking yapabilirim?</translation> </message> <message> <source>8) Where I should go if I need support?</source> <translation>8) Yardıma ihtiyacım olduğunda nereye gitmeliyim?</translation> </message> <message> <source>9) What is a Master Node?</source> <translation>9) Masternode nedir?</translation> </message> <message> <source>10) What is a Master Node Controller?</source> <translation>10) Masternode kontrolörü nedir?</translation> </message> <message> <source>1</source> <translation>1</translation> </message> <message> <source>What is PIVX?</source> <translation>PIVX Nedir?</translation> </message> <message> <source> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; PIVX is a form of digital online money using blockchain technology that can be easily transferred globally, instantly, and with near zero fees. PIVX incorporates market leading security &amp; privacy and is also the first PoS (Proof of Stake) Cryptocurrency to implement ZeroCoin(zMXT) and Zerocoin staking. &lt;/p&gt;&lt;p align="justify"&gt; PIVX utilizes a Proof of Stake (PoS) consensus system algorithm, allowing all owners of PIVX to participate in earning block rewards while securing the network with full node wallets, as well as to run Masternodes to create and vote on proposals. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </source> <translation> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; PIVX blockchain teknolojisini kullanan bir dijital online para birimidir global olarak anında ve sıfıra yakın ücretlerle transfer edilebilir PIVX sektördeki en ileri güvenlik ve gizliliğe sahip olmakla birlikte Zerocoin (zMXT) ve Zerocoin staking özelliklerini uygulayan ilk PoS (Proof of Stake) Kriptoparadır. &lt;/p&gt;&lt;p align="justify"&gt; PIVX bir Proof of Stake (PoS) konsensüs algoritma sistemi kullanmaktadır, bu sayede PIVX sahipleri blok ödülü kazanımına katılabilmektedir ağı diğer tam düğüm sahibi cüzdanlarla birlikte güvenli hale getirirler ayrıca Masternode oluşturarak tekliflere oy verirler. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </translation> </message> <message> <source>2</source> <translation>2</translation> </message> <message> <source>Why are my MXT unspendable?</source> <translation>PIV'lerim neden harcanabilir değil?</translation> </message> <message> <source> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; Newly received PIVX requires 6 confirmations on the network to become eligible for spending which can take ~6 minutes. &lt;/p&gt;&lt;p align="justify"&gt; Your PIVX wallet also needs to be completely synchronized to see and spend balances on the network. 
&lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </source> <translation> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; Yeni alınmış PIVX'lerin harcanabilr hale gelmesi için ağdan 6 onay alması gerekmektedir, bu işlem yaklaşık olarak 6 dakika sürer. &lt;/p&gt;&lt;p align="justify"&gt; PIVX cüzdanınızın ağdaki bakiyesini görebilmesi ve harcayabilmesi için tamamen senkronize olması gerekmektedir. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </translation> </message> <message> <source>3</source> <translation>3</translation> </message> <message> <source>PIVX privacy? What is Zerocoin (zMXT)?</source> <translation>PIVX gizliliği? Zerocoin (zMXT) nedir?</translation> </message> <message> <source> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; zMXT is an optional privacy-centric method of coin mixing on the PIVX blockchain. Basically all your transactions cannot be tracked on to any block explorer. You can read more about the technicals in the &lt;a style='color: #b088ff' href='https://PIVX.org/zmxt/'&gt; "PIVX Zerocoin (zMXT) Technical Paper"&lt;/a&gt;. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </source> <translation> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; zMXT PIVX blockchaini üzerinde koinleri karıştırmak için kullanılan gizlilik-odaklı opsiyonel bir metoddur. Temel olarak işlemleriniz hiçbir tarayıcı tarafından takip edilemez. Teknik detayları hakkında daha çok bilgi almak için &lt;a style='color: #b088ff' href='https://PIVX.org/zmxt/'&gt; "PIVX Zerocoin (zMXT) Teknik Makalesi"&lt;/a&gt;. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </translation> </message> <message> <source>4</source> <translation>4</translation> </message> <message> <source>Why are my zMXT unspendable?</source> <translation>zMXT'lerim neden harcanabilir değil?</translation> </message> <message> <source> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; After minting, zMXT will require 20 confirmations as well as 1 additional mint of the same denomination on the network to become eligible for spending. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </source> <translation> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; Mint işlemi sonrasında, zMXT'lerin harcanabilir duruma gelmesi için 20 onay alması ve kendisiyle aynı bölüme sahip fazladan 1 mint'e ihtiyaç vardır. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </translation> </message> <message> <source>5</source> <translation>5</translation> </message> <message> <source>Why did my wallet convert the balance into zMXT automatically?</source> <translation>Neden cüzdanım bakiyemi otomatik olarak zMXT'e dönüştürdü?</translation> </message> <message> <source> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; By default the PIVX wallet will convert 10% of your entire PIV balance to zMXT to assist the network. If you do not wish to stake zMXT or take advantage of the privacy benefit it brings, you can disable the automatic minting in your PIVX wallet by going to Settings-&gt;Options and deselecting “Enable zMXT Automint”. If you are not making use of the PIVX-QT or GUI you can simply open your pivx.conf file and add &lt;i&gt;enablezeromint=0&lt;/i&gt; Without the quotation marks and restart your wallet to disable automint.&lt;/p&gt; &lt;/p&gt;&lt;p align="justify"&gt; You can read more about zMXT in the &lt;a style='color: #b088ff' href='https://PIVX.org/zmxt/'&gt; "PIVX Zerocoin (zMXT) Technical Paper"&lt;/a&gt;. If you would like to keep and stake your zMXT, please read the "How do I stake" section of the FAQ below. 
&lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </source> <translation> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; PIVX cüzdanınız ağa destek olmak için varsayılan olarak tüm MXT bakiyenizin 10% 'unu zMXT'e dönüştürecektir. Getirdiği gizlilik avantajını istemiyor veya zMXT stakelemek istemiyorsanız cüzdanınızın otomint özelliğini Ayarlar-&gt;Seçenekler içindeki “zMXT Otomint etkin” seçeneğini değiştirerek devredışı bırakabilirsiniz. PIVXQT veya GUI kullanmıyorsanız, pivx.conf dosyası içine &lt;i&gt;enablezeromint=0&lt;/i&gt; ifadesini tırnak işareti olmadan girerek devredışı bırakabilirsiniz. &lt;/p&gt; &lt;/p&gt;&lt;p align="justify"&gt; Teknik detayları hakkında daha çok bilgi almak için &lt;a style='color: #b088ff' href='https://PIVX.org/zmxt/'&gt; "PIVX Zerocoin (zMXT) Teknik Makalesi"&lt;/a&gt;. zMXT'lerinizi tutmak ve stakelemek istiyorsanız, lütfen SSS kısmındaki "Nasıl Stake yaparım" kısmını okuyunuz. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </translation> </message> <message> <source>6</source> <translation>6</translation> </message> <message> <source>How do I receive PIV/zMXT?</source> <translation>Nasıl PIV/zMXT alabilirim?</translation> </message> <message> <source> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; zMXT can be spent and sent to any PIVX address. The receiver will receive standard PIVX but the origin of the PIVX is anonymized by the zMXT Protocol. &lt;/p&gt;&lt;p align="justify"&gt; If you want more zMXT you will need to mint your balance in the “Privacy” tab. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </source> <translation> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; zMXT herhangi bir PIVX adresine gönderilerek harcanabilir. Alıcı standart PIVX alacaktır fakat PIVX kaynağı zMXT Protokolü ile anonimleştirilmiş olacaktır. &lt;/p&gt;&lt;p align="justify"&gt; Daha fazla zMXT istiyorsanız, "Gizlilik" tabından bakiyenizi mintlemeniz gerekecektir. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </translation> </message> <message> <source>7</source> <translation>7</translation> </message> <message> <source>How do I stake PIV/zMXT?</source> <translation>Nasıl PIV/zMXT staking yapabilirim?</translation> </message> <message> <source> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; To Stake PIVX: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ol&gt;&lt;li&gt; Make sure your wallet is completely synchronized and you are using the latest release. &lt;li&gt; You must have a balance of PIVX with a minimum of 101 confirmations. &lt;li&gt; Your wallet must stay online and be unlocked for anonymization and staking purposes. &lt;li&gt; Once all those steps are followed staking should be enabled. &lt;li&gt; You can see the status of staking in the wallet by mousing over the package icon in the row on the top left of the wallet interface. There package will be lit up and will state "Staking Enabled" to indicate it is staking. Using the command line interface (pivx-cli); the command &lt;i&gt;getstakingstatus&lt;/i&gt; will confirm that staking is active. &lt;/li&gt;&lt;/ol&gt; &lt;/p&gt;&lt;p align="justify"&gt; To Stake zMXT: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ol&gt;&lt;li&gt; Make sure your wallet is completely synchronized and you are using the latest release. &lt;li&gt; Your newly minted or existing zMXT balance must have a minimum of 200 confirmations. &lt;li&gt; Your wallet must stay online and be unlocked for anonymization and staking purposes. Staking should now be enabled. 
&lt;/li&gt;&lt;/ol&gt; &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </source> <translation> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; PIVX Staking yapabilmek için: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ol&gt;&lt;li&gt; Cüzdanınızı tamamen senkronize olduğundan ve son versiyonu kullandığınızdan emin olun. &lt;li&gt; En az 101 onay almış bir PIVX bakiyeniz olmaldır. &lt;li&gt; Cüzdanınız online olmalı, stake ve anonimleştirme işlemleri için kilidini açılmış olmalıdır. &lt;li&gt; Bu adımlar tamamlanmışsa staking çalışıyor olmalıdır. &lt;li&gt; Staking durumunu kontrol etmek için fare imlecini cüzdanın sol üst kısmındaki paket ikonu üzerinde tuttuğunuzda görebilirsiniz. Yaptığını göstermek için paket ikonunun ışığı yanacak ve "Staking aktif" yazacaktır. Komut satırı arayüzünden (pivx-cli); &lt;i&gt;getstakingstatus&lt;/i&gt; komutunu çalıştırarak staking'in etkin olduğunu onaylayabilirsiniz. &lt;/li&gt;&lt;/ol&gt; &lt;/p&gt;&lt;p align="justify"&gt; zMXT Staking yapabilmek için: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ol&gt;&lt;li&gt; Cüzdanınızı tamamen senkronize olduğundan ve son versiyonu kullandığınızdan emin olun. &lt;li&gt; En az 200 onay almış yeni oluşturulmuş veya eskiden bulunan zMXTX bakiyeniz olmaldır. &lt;li&gt; Cüzdanınız online olmalı, stake ve anonimleştirme işlemleri için kilidini açılmış olmalıdır. Bu adımlar tamamlanmışsa staking çalışıyor olmalıdır. &lt;/li&gt;&lt;/ol&gt; &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </translation> </message> <message> <source>8</source> <translation>8</translation> </message> <message> <source>Where I should go if I need support?</source> <translation>Yardıma ihtiyacım olduğunda nereye gitmeliyim?</translation> </message> <message> <source> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; We have support channels in most of our official chat groups, for example &lt;a style='color: #b088ff' href='https://Discord.PIVX.com'&gt; #support in our Discord&lt;/a&gt;. If you prefer to submit a ticket, One can be &lt;a style='color: #b088ff' href='https://PIVX.FreshDesk.com'&gt; our Freshdesk support site&lt;/a&gt;. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </source> <translation> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; Resmi chat gruplarımızın çoğunun içinde destek kanalları bulunmaktadır, örneğin &lt;a style='color: #b088ff' href='https://Discord.PIVX.com'&gt; Discord içinnde #support &lt;/a&gt;. Bir destek talebi açmak isterseniz,bunu &lt;a style='color: #b088ff' href='https://PIVX.FreshDesk.com'&gt; Freshdesk destek sitesinden yapabilirsiniz&lt;/a&gt;. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </translation> </message> <message> <source>9</source> <translation>9</translation> </message> <message> <source>What is a Master Node?</source> <translation>Masternode nedir?</translation> </message> <message> <source> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; A masternode is a computer running a full node PIVX core wallet with a requirement of 5,000 MXT secured collateral to provide extra services to the network and in return, receive a portion of the block reward regularly. 
These services include: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ul&gt; &lt;li&gt;Instant transactions (FastTX)&lt;/li&gt; &lt;li&gt;A decentralized governance (Proposal Voting)&lt;/li&gt; &lt;li&gt;A decentralized budgeting system (Treasury)&lt;/li&gt; &lt;li&gt;Validation of transactions within each block&lt;/li&gt; &lt;li&gt;Act as an additional full node in the network&lt;/li&gt; &lt;/ul&gt; &lt;/p&gt;&lt;p align="justify"&gt; For providing such services, masternodes are also paid a certain portion of reward for each block. This can serve as a passive income to the masternode owners minus their running cost. &lt;/p&gt;&lt;p align="justify"&gt; Masternode Perks: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ul&gt; &lt;li&gt;Participate in PIVX Governance&lt;/li&gt; &lt;li&gt;Earn Masternode Rewards&lt;/li&gt; &lt;li&gt;Commodity option for future sale&lt;/li&gt; &lt;li&gt;Help secure the PIVX network&lt;/li&gt; &lt;/ul&gt; &lt;/p&gt;&lt;p align="justify"&gt; Requirements: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ul&gt; &lt;li&gt;5,000 MXT per single Masternode instance&lt;/li&gt; &lt;li&gt;Must be stored in a core wallet&lt;/li&gt; &lt;li&gt;Need dedicated IP address&lt;/li&gt; &lt;li&gt;Masternode wallet to remain online&lt;/li&gt; &lt;/ul&gt; &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </source> <translation> &lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt; Masternode, tam düğüm PIVX core cüzdanını çalıştıran, gereken 5,000 MXT teminat bakiyesini barındırarak ağa ekstra hizmetler sunan ve karşılığında da her blok ödülünden düzenli olarak pay alan bir bilgisayardır. Bu hizmetler aşağıdaki gibidir: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ul&gt; &lt;li&gt;Anlık transferler (FastTX)&lt;/li&gt; &lt;li&gt;Merkeziyetsiz yönetim (Teklif Onaylama)&lt;/li&gt; &lt;li&gt;Merkeziyetsiz bütçe sistemi (Hazine)&lt;/li&gt; &lt;li&gt;Her bloğun içinde işlem onayı&lt;/li&gt; &lt;li&gt;Ağda ek bir düğüm olarak görev yapmak&lt;/li&gt; &lt;/ul&gt; &lt;/p&gt;&lt;p align="justify"&gt; Bu tür hizmetler vermesi ile masternodlar her blok ödülünden belli bir oranda pay alırlar. Bu masternode sahiplerine işletim maliyetleri sonrası pasif bir gelir sunar. &lt;/p&gt;&lt;p align="justify"&gt; Masternode Avantajları: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ul&gt; &lt;li&gt;PIVX yönetimine katılın&lt;/li&gt; &lt;li&gt;Masternode Ödülleri kazanın&lt;/li&gt; &lt;li&gt;İlerde satabileceğiniz bir varlık opsiyonu&lt;/li&gt; &lt;li&gt;PIVX ağının güvenliğine destek olun&lt;/li&gt; &lt;/ul&gt; &lt;/p&gt;&lt;p align="justify"&gt; Gereksinimler: &lt;/p&gt;&lt;p align="justify"&gt; &lt;ul&gt; &lt;li&gt;Her Masternode için ayrı 5,000 MXT&lt;/li&gt; &lt;li&gt;Core cüzdan içinde saklanmalıdır.&lt;/li&gt; &lt;li&gt;Atanmış IP adresi gerekmektedir. &lt;/li&gt; &lt;li&gt;Masternode cüzdanının online olması&lt;/li&gt; &lt;/ul&gt; &lt;/p&gt;&lt;/body&gt;&lt;/html&gt; </translation> </message> <message> <source>10</source> <translation>10</translation> </message> <message> <source>What is a Master Node Controller?</source> <translation>Masternode Kontrolörü nedir?</translation> </message> <message> <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt;A Masternode Controller wallet is where the 5,000 MXT collateral can reside during a Controller-Remote masternode setup. It is a wallet that can activate the remote masternode wallet/s and allows you to keep your collateral coins offline while the remote masternode remains online. 
&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source> <translation>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align="justify"&gt;Masternode Kontrol cüzdanı Masternode Uzaktan-Kontrol kurulumu yaparken içinde 5,000 MXT teminatını bulundurur. Bu cüzdan uzak masternode cüzdan/larını etkinleştirebilir, ayrıca masternodlarınız online iken teminatınızı bağlantı dışında tutmanıza imkan sağlar. &lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</translation> </message> <message> <source>Frequently Asked Questions</source> <translation>Sıkça Sorulan Sorular</translation> </message> <message> <source>You can read more here</source> <translation>Burada daha fazla okuyabilirsiniz</translation> </message> <message> <source>Exit</source> <translation>Çıkış</translation> </message> </context> <context> <name>SettingsInformationWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>Network Monitor</source> <translation>Ağ Gözlemcisi</translation> </message> <message> <source>Information</source> <translation>Bilgi</translation> </message> <message> <source>General</source> <translation>Genel</translation> </message> <message> <source>Client Version: </source> <translation>Uygulama versiyonu: </translation> </message> <message> <source>User Agent:</source> <translation>Kullanıcı Aracısı:</translation> </message> <message> <source>Using BerkeleyDB version:</source> <translation>Kullanılan BerkeleyDB versiyonu:</translation> </message> <message> <source>Datadir: </source> <translation>Bilgi Dizini:</translation> </message> <message> <source>Startup Time: </source> <translation>Başlatma zamanı: </translation> </message> <message> <source>Network</source> <translation>Ağ</translation> </message> <message> <source>Name:</source> <translation>İsim:</translation> </message> <message> <source>Number Connections:</source> <translation>Bağlantı Sayısı:</translation> </message> <message> <source>Blockchain</source> <translation>Blockchain</translation> </message> <message> <source>Current Number of Blocks:</source> <translation>Mevcut Blok Sayısı:</translation> </message> <message> <source>Last Block Time:</source> <translation>Son Blok Zamanı:</translation> </message> <message> <source>Memory Pool</source> <translation>Hafıza Havuzu</translation> </message> <message> <source>Current Number of Transactions:</source> <translation>Mevcut İşlem Sayısı:</translation> </message> <message> <source>Main</source> <translation>Ana</translation> </message> <message> <source>Wallet Conf</source> <translation>Cüzdan Konf</translation> </message> <message> <source>Backups</source> <translation>Yedekler</translation> </message> <message> <source>No information</source> <translation>Bilgi yok</translation> </message> <message> <source>Unable to open backups folder</source> <translation>Yedek klasörü açılamadı</translation> </message> <message> <source>Unable to open pivx.conf with default application</source> <translation>pivx.conf dosyası varsayılan uygulama ile açılamadı</translation> </message> <message> <source>In:</source> <translation>İçe:</translation> </message> <message> <source>Out:</source> <translation>Dışa:</translation> </message> </context> <context> <name>SettingsMainOptionsWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>Window</source> <translation>Pencere</translation> </message> 
<message> <source>Customize the application window options</source> <translation>Uygulama Penceresi seçeneklerini değiştir</translation> </message> <message> <source>CheckBox</source> <translation>CheckBox</translation> </message> <message> <source>Reset to default</source> <translation>Varsayılana konumlandır</translation> </message> <message> <source>Discard changes</source> <translation>Değişikliklerden vazgeç</translation> </message> <message> <source>SAVE</source> <translation>KAYDET</translation> </message> <message> <source>Main</source> <translation>Ana</translation> </message> <message> <source>Size of database cache</source> <translation>Veritabanı önbellek büyüklüğü</translation> </message> <message> <source>Number of script verification threads</source> <translation>Komut onay parça sayısı</translation> </message> <message> <source>Start PIVX on system login</source> <translation>Sistem açılışında PIVX'i başlat</translation> </message> <message> <source>Minimize to the tray instead of the taskbar</source> <translation>Görev çubuğu yerine tepsiye küçült</translation> </message> <message> <source>Minimize on close</source> <translation>Kapama tuşuna basınca küçült</translation> </message> <message> <source>Reset Options</source> <translation>Seçenekleri Resetle</translation> </message> <message> <source>You are just about to reset the app's options to the default values. Are you sure? </source> <translation>Uygulamanın seçeneklerini varsayılan değerlere dönüştürmek üzeresiniz. Emin misiniz? </translation> </message> <message> <source>Options reset succeed</source> <translation>Opsiyonları sıfırlama başarılı</translation> </message> </context> <context> <name>SettingsMultisendDialog</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Transaction Details</source> <translation>İşlem Detayları</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>SAVE</source> <translation>KAYDET</translation> </message> <message> <source>New recipient for multisend</source> <translation>ÇokluGönder için yeni alıcı</translation> </message> <message> <source>Label (optional)</source> <translation>Etiket (Opsiyonel)</translation> </message> <message> <source>Enter a label to add this address in your address book</source> <translation>Adres defterinize kaydetmek için bir etiket girin</translation> </message> <message> <source>Percentage</source> <translation>Yüzde</translation> </message> </context> <context> <name>SettingsMultisendWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>No active Master Node yet</source> <translation>Henüz aktif Masternode bulunmuyor</translation> </message> <message> <source>CheckBox</source> <translation>CheckBox</translation> </message> <message> <source>MultiSend allows you to automatically send up to 100% of your stake or masternode reward to a list of other PIVX addresses after it matures.</source> <translation>ÇokluGönder olgunlaşmış masternode ve staking ödüllerinizin %100'üne kadarını diğer PIVX adreslerine otomatik olarak göndermenizi sağlar. 
</translation> </message> <message> <source>Active</source> <translation>Aktif</translation> </message> <message> <source>Disable</source> <translation>Devre dışı bırak</translation> </message> <message> <source>No active recipient yet</source> <translation>Henüz aktif alıcı bulunmuyor</translation> </message> <message> <source>Send stakes</source> <translation>Stakeleri gönder</translation> </message> <message> <source>Send masternode rewards</source> <translation>Masternode ödüllerini gönder</translation> </message> <message> <source>ADD RECIPIENT</source> <translation>ALICI EKLE</translation> </message> <message> <source>CLEAR ALL</source> <translation>TEMİZLE</translation> </message> <message> <source>Clear succeed</source> <translation>Temizlik başarılı</translation> </message> <message> <source>Clear all failed, could not locate address in wallet file</source> <translation>Hepsini temizleme işlemi başarısız, cüzdan dosyasında adres bulunamadı</translation> </message> <message> <source>The entered address: %1 is invalid. Please check the address and try again.</source> <translation>Girilen adres: %1 geçersiz. Lütfen adresi kontrol ediniz ve tekrar deneyiniz.</translation> </message> <message> <source>Invalid percentage, please enter values from 1 to 100.</source> <translation>Geçersiz yüzde, lütfen 1 ile 100 arasında bir değer girin.</translation> </message> <message> <source>The total amount of your MultiSend vector is over 100% of your stake reward</source> <translation>MultiSend vektörünüzün toplam miktarı stake ödülünün %100'ünden fazla</translation> </message> <message> <source>Error saving MultiSend, failed saving properties to the database.</source> <translation>ÇokluGönder kaydetme hatası, özellikler veritabanına yazılamadı.</translation> </message> <message> <source>Unable to activate MultiSend, no available recipients</source> <translation>ÇokluGönder aktive edilemedi, hazırda alıcı bulunmuyor.</translation> </message> <message> <source>Unable to activate MultiSend Check one or both of the check boxes to send on stake and/or masternode rewards</source> <translation>ÇokluGönder aktive edilemedi İşaret kutularından birini veya ikisini seçerek stake ve/veya masternode ödüllerinden gönderin.</translation> </message> <message> <source>MultiSend activated but writing settings to DB failed</source> <translation>Çoklu Gönderim devrede fakat ayarlar veritabanına yazılamadı</translation> </message> <message> <source>MultiSend activated</source> <translation>ÇokluGönder etkinleştirildi</translation> </message> <message> <source>First multiSend address invalid</source> <translation>İlk ÇokluGönder adresi geçersiz</translation> </message> <message> <source>MultiSend deactivated but writing settings to DB failed</source> <translation>Çoklu Gönderim devre dışı fakat ayarlar veritabanına yazılamadı</translation> </message> <message> <source>MultiSend deactivated</source> <translation>Çoklu Gönderim devre dışı</translation> </message> </context> <context> <name>SettingsSignMessageWidgets</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Clear</source> <translation>Temizle</translation> </message> <message> <source>Sign/Verify Message</source> <translation>Mesaj İmza/Onay</translation> </message> <message> <source>Enter a PIVX address or contact label</source> 
<translation>Bir PIVX adresi veya bağlantı etiketi girin</translation> </message> <message> <source>Add address</source> <translation>Adres ekle</translation> </message> <message> <source>Sign</source> <translation>İmzala</translation> </message> <message> <source>Verify</source> <translation>Onayla</translation> </message> <message> <source>Select mode</source> <translation>Modu Seç</translation> </message> <message> <source>Message</source> <translation>Mesaj</translation> </message> <message> <source>Write a message</source> <translation>Bir mesaj yaz</translation> </message> <message> <source>Signature</source> <translation>İmza</translation> </message> <message> <source>SIGN</source> <translation>İMZALA</translation> </message> <message> <source>CLEAR ALL</source> <translation>TEMİZLE</translation> </message> <message> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Adreslerin size ait olduğunu kanıtlamak için adreslerinizle mesaj imzalayabilirsiniz. Kimlik avı saldırıları kimliğinizi onlara imzalatmaya çalışabileceğinden, belirsiz hiçbir şeyi imzalamamaya dikkat edin. Yalnızca kabul ettiğiniz, tamamen ayrıntılı ifadeleri imzalayın.</translation> </message> <message> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Mesajı doğrulamak için imzalayan adresi, mesajı (satır sonlarını, boşlukları, sekmeleri vs. tam olarak kopyaladığınızdan emin olun) ve imzayı aşağıya girin. 
Bir aradaki adam (man-in-the-middle) saldırısı tarafından kandırılmamak için, imzadan imzalı mesajın kendisinde yazandan daha fazlasını çıkarmamaya dikkat edin.</translation> </message> <message> <source>VERIFY</source> <translation>ONAYLA</translation> </message> <message> <source>The entered address is invalid.</source> <translation>Girilen adres geçersiz.</translation> </message> <message> <source>Please check the address and try again.</source> <translation>Lütfen adresi kontrol edin ve tekrar deneyin.</translation> </message> <message> <source>The entered address does not refer to a key.</source> <translation>Girilen adres bir anahtara atıfta bulunmuyor.</translation> </message> <message> <source>Wallet unlock was cancelled.</source> <translation>Cüzdan kilidi açılması iptal edildi.</translation> </message> <message> <source>Private key for the entered address is not available.</source> <translation>Girilen adres için özel anahtar kullanılamıyor.</translation> </message> <message> <source>Message signing failed.</source> <translation>Mesaj imzalama başarısız oldu.</translation> </message> <message> <source>Message signed.</source> <translation>Mesaj imzalandı.</translation> </message> <message> <source>The signature could not be decoded.</source> <translation>İmza çözülemedi.</translation> </message> <message> <source>Please check the signature and try again.</source> <translation>Lütfen imzayı kontrol edin ve tekrar deneyin.</translation> </message> <message> <source>The signature did not match the message digest.</source> <translation>İmza, mesaj özetiyle eşleşmedi.</translation> </message> <message> <source>Message verification failed.</source> <translation>Mesaj doğrulaması başarısız oldu.</translation> </message> <message> <source>Message verified.</source> <translation>Mesaj doğrulandı.</translation> </message> <message> <source>No addresses available, you can go to the receive screen and add some there!</source> <translation>Uygun adres bulunmuyor, AL ekranına giderek adres ekleyebilirsiniz.</translation> </message> </context> <context> <name>SettingsWalletOptionsWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>CheckBox</source> <translation>CheckBox</translation> </message> <message> <source>RadioButton</source> <translation>RadioButton</translation> </message> <message> <source>Reset to default</source> <translation>Varsayılana konumlandır</translation> </message> <message> <source>Discard changes</source> <translation>Değişikliklerden vazgeç</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Wallet</source> <translation>Cüzdan</translation> </message> <message> <source>Customize the internal wallet options</source> <translation>Cüzdan içi opsiyonları özelleştir</translation> </message> <message> <source>Stake split threshold:</source> <translation>Stake bölme eşiği:</translation> </message> <message> <source>Spend unconfirmed change</source> <translation>Onaylanmamış para üstünü harca</translation> </message> <message> <source>Network</source> <translation>Ağ</translation> </message> <message> <source>Customize the node network options</source> <translation>Düğüm ağ opsiyonlarını özelleştir</translation> </message> <message> <source>Proxy IP:</source> <translation>Proksi IP:</translation> </message> <message> <source>Enter proxy IP</source> <translation>Proksi IP'sini gir</translation> 
</message> <message> <source>Port:</source> <translation>Port:</translation> </message> <message> <source>Map port using UPnP</source> <translation>UPnP kullanarak portu yönlendir</translation> </message> <message> <source>Allow incoming connections</source> <translation>Gelen bağlantılara izin ver</translation> </message> <message> <source>Connect through SOCKS5 proxy (default proxy):</source> <translation>SOCKS5 proksisi üzerinden bağlan (varsayılan proksi):</translation> </message> <message> <source>SAVE</source> <translation>KAYDET</translation> </message> <message> <source>Options reset succeed</source> <translation>Opsiyonları sıfırlama başarılı</translation> </message> </context> <context> <name>SettingsWalletRepairWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Wallet Repair</source> <translation>Cüzdan Onarımı</translation> </message> <message> <source>The buttons below will restart the wallet with command-line options to repair this wallet, fix issues with corrupt blockchain files or missing/obsolete transactions.</source> <translation>Aşağıdaki düğmeleri kullanarak cüzdanı tekrar başlattığınızda cüzdan onarımı, bozuk blockchain dosyaları veya eksik/süresi geçmiş işlemlerden kaynaklı hataları düzeltmek için komut satırı opsiyonlarıyla açılacaktır.</translation> </message> <message> <source>Attempt to recover private keys from a corrupt wallet.dat.</source> <translation>Bozuk bir wallet.dat'tan özel anahtarları kurtarmayı deneyin.</translation> </message> <message> <source>Rescan the blockchain for missing wallet transactions.</source> <translation>Eksik cüzdan transferleri için blockchaini tekrar tara.</translation> </message> <message> <source>Recover transactions from blockchain (keep-meta-data, e.g. account owner).</source> <translation>Blockchainden işlemleri kurtarın (meta verileri koru, örn. hesap sahibi).</translation> </message> <message> <source>Recover transactions from blockchain (drop meta-data).</source> <translation>İşlemleri blok zincirden kurtarın (meta verileri bırakın).</translation> </message> <message> <source>Upgrade wallet to latest format on startup. (Note: this is NOT an update of the wallet itself)</source> <translation>Cüzdanı başlangıçta en yeni formata yükseltin. (Not: Bu, cüzdanın kendisinin güncellemesi değildir!)</translation> </message> <message> <source>Rebuild blockchain index from current blk000???.dat files.</source> <translation>Blok zinciri dizinini mevcut blk000???.dat 
dosyalarından yeniden oluştur.</translation> </message> <message> <source>Deletes all local blockchain folders so the wallet synchronizes from scratch.</source> <translation>Cüzdanın sıfırdan senkronize olması için tüm yerel blok zincir klasörlerini siler.</translation> </message> <message> <source>Salvage wallet</source> <translation>Cüzdanı kurtar</translation> </message> <message> <source>Rescan blockchain file</source> <translation>Blockchain dosyasını tekrar tara</translation> </message> <message> <source>Recover transactions 1</source> <translation>İşlemleri kurtarın 1</translation> </message> <message> <source>Recover transactions 2</source> <translation>İşlemleri kurtar 2</translation> </message> <message> <source>Upgrade wallet format</source> <translation>Cüzdan formatını güncelleyin</translation> </message> <message> <source>Rebuild index</source> <translation>Dizini yeniden oluştur</translation> </message> <message> <source>Delete local blockchain </source> <translation>Yerel Blockchain'i sil</translation> </message> <message> <source>This will delete your local blockchain folders and the wallet will synchronize the complete Blockchain from scratch.&lt;br /&gt;&lt;br /&gt;</source> <translation>Bu, yerel blok zincir klasörlerini silecek ve cüzdan tüm Blockchain'i sıfırdan senkronize edecektir.&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source>This needs quite some time and downloads a lot of data.&lt;br /&gt;&lt;br /&gt;</source> <translation>Bu biraz zaman alacak ve birçok veri indirecektir.&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source>Your transactions and funds will be visible again after the download has completed.&lt;br /&gt;&lt;br /&gt;</source> <translation>İşlemleriniz ve bakiyeniz indirme tamamlandıktan sonra tekrar görünür olacaktır.&lt;br /&gt;&lt;br /&gt;</translation> </message> <message> <source>Do you want to continue?.&lt;br /&gt;</source> <translation>Devam etmek istiyor musunuz?.&lt;br /&gt;</translation> </message> <message> <source>Confirm resync Blockchain</source> <translation>Blockchaini resenkronize etmeyi onaylayın</translation> </message> </context> <context> <name>SettingsWidget</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>Wallet Data</source> <translation>Cüzdan Bilgisi</translation> </message> <message> <source>Wallet</source> <translation>Cüzdan</translation> </message> <message> <source>Multisend</source> <translation>Çoklu Gönderim</translation> </message> <message> <source>Tools</source> <translation>Araçlar</translation> </message> <message> <source>Sign/Verify Message</source> <translation>Mesaj İmza/Onay</translation> </message> <message> <source>BIP38 Tool</source> <translation>BIP38 Aracı</translation> </message> <message> <source>Options</source> <translation>Seçenekler</translation> </message> <message> <source>Main</source> <translation>Ana</translation> </message> <message> <source>Display</source> <translation>Görüntü</translation> </message> <message> <source>Debug</source> <translation>Hata Ayıklama</translation> </message> <message> <source>Information</source> <translation>Bilgi</translation> </message> <message> <source>Console</source> <translation>Konsol</translation> </message> <message> <source>Wallet Repair</source> <translation>Cüzdan Onarımı</translation> </message> <message> <source>Help</source> <translation>Yardım</translation> 
</message> <message> <source>FAQ</source> <translation>SSS</translation> </message> <message> <source>About PIVX</source> <translation>PIVX Hakkında</translation> </message> <message> <source>Settings</source> <translation>Ayarlar</translation> </message> <message> <source>Confirm options reset</source> <translation>Seçeneklerin sıfırlanmasını onayla</translation> </message> <message> <source>Client restart required to activate changes.</source> <translation>Değişiklikleri etkinleştirmek için istemci yeniden başlatılması gerekiyor.</translation> </message> <message> <source>Client will be shutdown, do you want to proceed?</source> <translation>Cüzdan kapanacak, devam etmek istiyor musunuz?</translation> </message> <message> <source>Restart required</source> <translation>Tekrar başlatma gerekiyor</translation> </message> <message> <source>Your wallet needs to be restarted to apply the changes </source> <translation>Değişikliklerin etkinleşmesi için cüzdanınızın tekrar başlatılması gerekiyor. </translation> </message> <message> <source>Restart Now</source> <translation>Şimdi Tekrar Başlatın</translation> </message> <message> <source>Restart Later</source> <translation>Sonra Tekrar Başlatın</translation> </message> <message> <source>Options will be applied on next wallet restart</source> <translation>Seçenekler cüzdan tekrar başlatıldığında uygulanacaktır</translation> </message> <message> <source>Options stored</source> <translation>Seçenekler yüklendi</translation> </message> <message> <source>Options store failed</source> <translation>Seçenekler yüklenemedi</translation> </message> <message> <source>Discard Unsaved Changes</source> <translation>Kaydedilmemiş değişikliklerden vazgeç</translation> </message> <message> <source>You are just about to discard all of your unsaved options. Are you sure? </source> <translation>Şu an kaydedilmemiş tüm değişikliklerden vazgeçmek üzeresiniz Emin misiniz? 
</translation> </message> </context> <context> <name>ShutdownWindow</name> <message> <source>PIVX Core is shutting down...</source> <translation>PIVX Core kapanıyor ...</translation> </message> <message> <source>Do not shut down the computer until this window disappears.</source> <translation>Bu pencere kaybolmadan Bilgisayarı kapatmayın.</translation> </message> </context> <context> <name>SnackBar</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Contact Stored</source> <translation>Bağlantı yüklendi</translation> </message> </context> <context> <name>Splash</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Loading…</source> <translation>Yükleniyor...</translation> </message> <message> <source>PIVX Core</source> <translation>PIVX Core</translation> </message> </context> <context> <name>SplashScreen</name> <message> <source>PIVX Core</source> <translation>PIVX CORE</translation> </message> <message> <source>Version %1</source> <translation>Version %1</translation> </message> <message> <source>The Bitcoin Core developers</source> <translation>The Bitcoin Core developers</translation> </message> <message> <source>The Dash Core developers</source> <translation>The Dash Core developers</translation> </message> <message> <source>The PIVX Core developers</source> <translation>The PIVX Core developers</translation> </message> <message> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TooltipMenu</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Copy</source> <translation>Kopyala</translation> </message> <message> <source>Edit</source> <translation>Güncelle</translation> </message> <message> <source>Delete</source> <translation>Sil</translation> </message> <message> <source>Last</source> <translation>Son</translation> </message> </context> <context> <name>TopBar</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>480.0685 PIV</source> <translation>480.0685 PIV</translation> </message> <message> <source>1,000 zMXT</source> <translation>1,000 zMXT</translation> </message> <message> <source>Available</source> <translation>Uygun</translation> </message> <message> <source>Pending</source> <translation>Beklemede</translation> </message> <message> <source>6.943 PIV</source> <translation>6.943 PIV</translation> </message> <message> <source>Immature</source> <translation>Olgunlaşmamış</translation> </message> <message> <source>10 PIV</source> <translation>10 PIV</translation> </message> <message> <source>60 zMXT</source> <translation>60 zMXT</translation> </message> <message> <source>10 zMXT</source> <translation>10 zMXT</translation> </message> <message> <source>Wallet Unlocked for staking</source> <translation>Cüzdan Stake için açıldı</translation> </message> <message> <source>Error generating address</source> <translation>Adres oluşturma hatası</translation> </message> <message> <source>Address Copied</source> <translation>Adres kopyalandı</translation> </message> <message> <source>Automint enabled</source> <translation>Otomatik basım etkin</translation> </message> <message> <source>Automint disabled</source> <translation>Otomatik basım kapalı</translation> </message> <message> <source>Staking active</source> <translation>Stake etkin</translation> </message> <message> <source>Staking not active</source> <translation>Stake kapalı</translation> </message> 
<message numerus="yes"> <source>%n active connection(s)</source> <translation><numerusform>%n aktif bağlantı(lar) mevcut PIVX ağına</numerusform><numerusform>%n aktif bağlantı(lar)</numerusform></translation> </message> <message> <source>Synchronized</source> <translation>Senkron</translation> </message> <message numerus="yes"> <source>%n hour(s)</source> <translation><numerusform>%n saat</numerusform><numerusform>%n saat</numerusform></translation> </message> <message numerus="yes"> <source>%n day(s)</source> <translation><numerusform>%n gün</numerusform><numerusform>%n gün</numerusform></translation> </message> <message numerus="yes"> <source>%n week(s)</source> <translation><numerusform>%n hafta</numerusform><numerusform>%n hafta</numerusform></translation> </message> <message> <source>%1 and %2</source> <translation>%1 ve %2</translation> </message> <message numerus="yes"> <source>%n year(s)</source> <translation><numerusform>%n yıl</numerusform><numerusform>%n yıl</numerusform></translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <source>KB/s</source> <translation>KB/s</translation> </message> </context> <context> <name>TransactionDesc</name> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation><numerusform>%n blok daha için açık tut</numerusform><numerusform>%n blok daha için açık tut</numerusform></translation> </message> <message> <source>Open until %1</source> <translation>Açık %1 e dek</translation> </message> <message> <source>conflicted</source> <translation>çatışmalı</translation> </message> <message> <source>%1/offline</source> <translation>%1 / çevrimdışı</translation> </message> <message> <source>%1/unconfirmed</source> <translation>%1 / doğrulanmamış</translation> </message> <message> <source>%1 confirmations</source> <translation>%1 doğrulama</translation> </message> <message> <source>%1/offline (verified via FastTX)</source> <translation>%1 / çevrimdışı (FastTX ile doğrulanmış)</translation> </message> <message> <source>%1/confirmed (verified via FastTX)</source> <translation>%1 / doğrulandı (FastTX ile doğrulandı)</translation> </message> <message> <source>%1 confirmations (verified via FastTX)</source> <translation>%1 doğrulama (FastTX ile doğrulandı)</translation> </message> <message> <source>%1/offline (FastTX verification in progress - %2 of %3 signatures)</source> <translation>%1 / çevrimdışı (FastTX doğrulama işlemi devam ediyor - %3 imzalardan %2 si)</translation> </message> <message> <source>%1/confirmed (FastTX verification in progress - %2 of %3 signatures )</source> <translation>%1 / onaylandı (FastTX doğrulama işlemi - %3 imzalardan %2 si)</translation> </message> <message> <source>%1 confirmations (FastTX verification in progress - %2 of %3 signatures)</source> <translation>%1 doğrulama (FastTX doğrulama işlemi - %3 imzalardan %2 si)</translation> </message> <message> <source>%1/offline (FastTX verification failed)</source> <translation>%1 / çevrimdışı (FastTX doğrulama başarısız)</translation> </message> <message> <source>%1/confirmed (FastTX verification failed)</source> <translation>%1 / onaylandı (FastTX doğrulama başarısız oldu)</translation> </message> <message> <source>Status</source> <translation>Durum</translation> </message> <message> <source>, has not been successfully broadcast yet</source> <translation>, henüz başarıyla yayınlanmamış</translation> </message> <message numerus="yes"> <source>, broadcast through %n node(s)</source> <translation><numerusform>, %n düğüm üzerinden 
yayın</numerusform><numerusform>, %n düğüm üzerinden yayın</numerusform></translation> </message> <message> <source>Date</source> <translation>Tarih</translation> </message> <message> <source>Source</source> <translation>Kaynak</translation> </message> <message> <source>Generated</source> <translation>Oluşturuldu</translation> </message> <message> <source>From</source> <translation>Tarafından</translation> </message> <message> <source>unknown</source> <translation>Bilinmeyen</translation> </message> <message> <source>To</source> <translation>Şuna</translation> </message> <message> <source>own address</source> <translation>Kendi adresin</translation> </message> <message> <source>watch-only</source> <translation>sadece-izle </translation> </message> <message> <source>label</source> <translation>Etiket</translation> </message> <message> <source>Credit</source> <translation>Kredi</translation> </message> <message numerus="yes"> <source>matures in %n more block(s)</source> <translation><numerusform>%n blok sonra olgunlaşacak</numerusform><numerusform>%n blok sonra olgunlaşacak</numerusform></translation> </message> <message> <source>not accepted</source> <translation>kabul edilmez</translation> </message> <message> <source>Debit</source> <translation>Debit</translation> </message> <message> <source>Total debit</source> <translation>Total debit</translation> </message> <message> <source>Total credit</source> <translation>Total Kredi</translation> </message> <message> <source>Transaction fee</source> <translation>İşlem ücreti</translation> </message> <message> <source>Net amount</source> <translation>Net miktar</translation> </message> <message> <source>Message</source> <translation>Mesaj</translation> </message> <message> <source>Comment</source> <translation>Yorum</translation> </message> <message> <source>Transaction ID</source> <translation>İşlem ID si</translation> </message> <message> <source>Output index</source> <translation>Çıktı İndeksi</translation> </message> <message> <source>Merchant</source> <translation>Satıcı</translation> </message> <message> <source>Debug information</source> <translation>Debug bilgisi</translation> </message> <message> <source>Transaction</source> <translation>İşlem</translation> </message> <message> <source>Inputs</source> <translation>Girdiler</translation> </message> <message> <source>Amount</source> <translation>Miktar</translation> </message> <message> <source>true</source> <translation>true</translation> </message> <message> <source>false</source> <translation>false</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <source>Date</source> <translation>Tarih</translation> </message> <message> <source>Type</source> <translation>yaz</translation> </message> <message> <source>Address</source> <translation>adres</translation> </message> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation><numerusform>%n blok daha için açık </numerusform><numerusform>%n blok daha için açık </numerusform></translation> </message> <message> <source>Open until %1</source> <translation>Açık %1 e dek</translation> </message> <message> <source>Offline</source> <translation>Çevrimdışı</translation> </message> <message> <source>Unconfirmed</source> <translation>Doğrulanmamış</translation> </message> <message> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>Teyit ediliyor (%2 onaylamalı onayların %1'i)</translation> </message> <message> <source>Confirmed (%1 confirmations)</source> 
<translation>Onaylandı (%1 doğrulama)</translation> </message> <message> <source>Conflicted</source> <translation>Çatışmış</translation> </message> <message> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>Olgunlaşmamış (%1 doğrulama, %2'den sonra mevcut olacak)</translation> </message> <message> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Bu blok diğer düğümler tarafından alınmadı ve muhtemelen kabul edilmeyecek!</translation> </message> <message> <source>Received with</source> <translation>ile Alınmış</translation> </message> <message> <source>Masternode Reward</source> <translation>Masternode Ödülü </translation> </message> <message> <source>Received from</source> <translation>Tarafından alındı</translation> </message> <message> <source>Received via Anonsend</source> <translation>Şaşırtma yoluyla Alınan</translation> </message> <message> <source>PIV Stake</source> <translation>PIV Stake</translation> </message> <message> <source>zMXT Stake</source> <translation>zMXT Stake</translation> </message> <message> <source>PIV Cold Stake</source> <translation>PIV Soğuk Stake</translation> </message> <message> <source>PIV Stake in behalf of</source> <translation>Yerine MXT stake </translation> </message> <message> <source>Stake delegation</source> <translation>Stake delegasyonu</translation> </message> <message> <source>Anonsend Denominate</source> <translation>Şaşırtmaca denominate</translation> </message> <message> <source>Anonsend Collateral Payment</source> <translation>Soruna Katılma Teminat Ödemesi</translation> </message> <message> <source>Anonsend Make Collateral Inputs</source> <translation>Şaşırtma yan girdiler yapar</translation> </message> <message> <source>Anonsend Create Denominations</source> <translation>Anonsend Oluşturma Adları</translation> </message> <message> <source>Converted MXT to zMXT</source> <translation>PIV'den zMXT'e dönüştürülen</translation> </message> <message> <source>Spent zMXT</source> <translation>zMXT Harca</translation> </message> <message> <source>Received MXT from zMXT</source> <translation>zMXT'den MXT olarak alınan</translation> </message> <message> <source>Minted Change as zMXT from zMXT Spend</source> <translation>zMXT harcamasından kaynaklanan zMXT mint farkı</translation> </message> <message> <source>Converted zMXT to PIV</source> <translation>zMXT'den PIV' dönüştürülen</translation> </message> <message> <source>Sent to</source> <translation>Gönderilen </translation> </message> <message> <source>Orphan Block - Generated but not accepted. This does not impact your holdings.</source> <translation>Yetim Blok - Oluşturuldu, ancak kabul edilmedi. Bu, varlıklarınızı etkilemez.</translation> </message> <message> <source>Payment to yourself</source> <translation>Kendine ödeme</translation> </message> <message> <source>Mined</source> <translation>Mined </translation> </message> <message> <source>Obfuscated</source> <translation>Şaşırtıldı </translation> </message> <message> <source>watch-only</source> <translation>sadece-izle </translation> </message> <message> <source>Anonymous</source> <translation>Anonim</translation> </message> <message> <source>No information</source> <translation>Bilgi yok</translation> </message> <message> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>İşlem Durumu. 
Teyit sayısını göstermek için imlecinizi bu alanın üzerine getirin.</translation> </message> <message> <source>Date and time that the transaction was received.</source> <translation>İşlemin alındığı tarih ve saat.</translation> </message> <message> <source>Type of transaction.</source> <translation>İşlem Tipi</translation> </message> <message> <source>Whether or not a watch-only address is involved in this transaction.</source> <translation>Yalnızca izleme amaçlı adresin bu işleme dahil edilip edilmediği.</translation> </message> <message> <source>Destination address of transaction.</source> <translation>İşlemin varış adresi</translation> </message> <message> <source>Amount removed from or added to balance.</source> <translation>Bakiye kaldırılan veya dengeye eklenen miktar.</translation> </message> </context> <context> <name>TxDetailDialog</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Transaction Details</source> <translation>İşlem Detayları</translation> </message> <message> <source>ID:</source> <translation>ID:</translation> </message> <message> <source>492526e7fa3c810b35016...40a5df85ee227ab00b1156994</source> <translation>492526e7fa3c810b35016...40a5df85ee227ab00b1156994</translation> </message> <message> <source>Sending to: </source> <translation>Gönderiliyor: </translation> </message> <message> <source>D7VFR83SQbiezrW72hjcWJtcfip5krte2Z </source> <translation>D7VFR83SQbiezrW72hjcWJtcfip5krte2Z </translation> </message> <message> <source>Total amount</source> <translation>Toplam Tutar:</translation> </message> <message> <source>2 PIV</source> <translation>2 PIV</translation> </message> <message> <source>Coin inputs:</source> <translation>Koin girdileri:</translation> </message> <message> <source>1 Inputs</source> <translation>1 girdi</translation> </message> <message> <source>Previous Transaction</source> <translation>Önceki İşlem</translation> </message> <message> <source>Output Index</source> <translation>Çıktı Endeksi</translation> </message> <message> <source>Fee:</source> <translation>Ücret:</translation> </message> <message> <source>0.0001 PIV</source> <translation>0.0001 PIV</translation> </message> <message> <source>Change address:</source> <translation>Değişim Adresi</translation> </message> <message> <source>D7VFR83SQbie…BhjcWJtcfip5krte2Z </source> <translation>D7VFR83SQbie…BhjcWJtcfip5krte2Z </translation> </message> <message> <source>Confirmations:</source> <translation>Onaylar:</translation> </message> <message> <source>12</source> <translation>12</translation> </message> <message> <source>Size:</source> <translation>Büyüklük:</translation> </message> <message> <source>2 kB</source> <translation>2 kB</translation> </message> <message> <source>Date:</source> <translation>Tarih:</translation> </message> <message> <source>May 25, 2017</source> <translation>May 25, 2017</translation> </message> <message> <source>Status:</source> <translation>Durum:</translation> </message> <message> <source>Spendable</source> <translation>Harcanabilir</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> <message> <source>Confirm Your Transaction</source> <translation>İşleminizi Onaylayın</translation> </message> <message> <source>SEND</source> <translation>GÖNDER</translation> </message> <message> <source>ID copied</source> 
<translation>ID kopyalandı</translation> </message> <message> <source>Unknown</source> <translation>Bilinmeyen</translation> </message> </context> <context> <name>TxRow</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Received from Bob</source> <translation>Bob'dan alındı</translation> </message> <message> <source>18/05/18</source> <translation>18/05/18</translation> </message> <message> <source>+0.000585 PIV</source> <translation>+0.000585 PIV</translation> </message> </context> <context> <name>WalletModel</name> <message> <source>Send Coins</source> <translation>Koinleri gönder</translation> </message> <message> <source>FastTX doesn't support sending values that high yet. Transactions are currently limited to %1 PIV.</source> <translation>FastTX bu kadar yüksek değerleri göndermeyi henüz desteklemiyor. İşlemler şu anda %1 MXT ile sınırlıdır.</translation> </message> </context> <context> <name>WalletPasswordDialog</name> <message> <source>Dialog</source> <translation>Diyalog</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim.</source> <translation>Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim.</translation> </message> <message> <source>CANCEL</source> <translation>İPTAL</translation> </message> <message> <source>OK</source> <translation>TAMAM</translation> </message> </context> <context> <name>WelcomeContentWidget</name> <message> <source>Dialog</source> <translation>Diyalog</translation> </message> <message> <source>1</source> <translation>1</translation> </message> <message> <source>2</source> <translation>2</translation> </message> <message> <source>3</source> <translation>3</translation> </message> <message> <source>4</source> <translation>4</translation> </message> <message> <source>Language</source> <translation>Dil</translation> </message> <message> <source>Welcome</source> <translation>Hoşgeldiniz</translation> </message> <message> <source>Privacy</source> <translation>Gizlilik</translation> </message> <message> <source>Masternodes</source> <translation>Masternode'lar</translation> </message> <message> <source>Select your language</source> <translation>Dilinizi seçin</translation> </message> <message> <source>Welcome to PIVX Core Wallet</source> <translation>PIVX Core Cüzdanı'na Hoşgeldiniz</translation> </message> <message> <source>PIVX is the world’s most innovative Proof of Stake blockchain based technology. Developed by a team of highly experienced developers and cryptographers.</source> <translation>PIVX Proof of Stake tabanlı dünyadaki en yenilikçi blockchain teknolojisidir. 
Yüksek tecrübeye sahip yazılımcılar ve kriptograflardan oluşan bir ekip tarafından geliştirilmiştir.</translation> </message> <message> <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;As our manifesto says: Privacy is a non-negotiable basic human right; it grants users the freedom to share their data whenever and with whomever they want - PIVX believes in self sovereignty.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source> <translation>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;Manifestomuzda yazdığı gibi: Gizlilik, pazarlık yapılamayacak temel bir insan hakkıdır; kullanıcılarına istedikleri zaman ve istedikleri kişiyle verilerini paylaşma özgürlüğü verir - PIVX özerkliğe inanır.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</translation> </message> <message> <source>The masternode network is PIVX's second layer network on top of the blockchain that enables our DAO to provide decentralized governance and a treasury.</source> <translation>Masternode ağı dağıtık yönetim ve hazine yönetimini sağlayan PIVX blockchaini üzerindeki ikinci katmandır.</translation> </message> <message> <source>How PIVX respects your privacy?</source> <translation>PIVX gizliliğinize nasıl saygı duyar?</translation> </message> <message> <source>What is a Masternode?</source> <translation>Masternode nedir?</translation> </message> <message> <source>default</source> <translation>varsayılan</translation> </message> </context> <context> <name>ZMxtControlDialog</name> <message> <source>Select zMXT to Spend</source> <translation>Harcanacak zMXT'leri seçin</translation> </message> <message> <source>Coin Control</source> <translation>Koin kontrolü</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> <message> <source>zMxt</source> <translation>zMxt</translation> </message> <message> <source>0.00 </source> <translation>0.00 </translation> </message> <message> <source>Quantity:</source> <translation>Miktar</translation> </message> <message> <source>Is Spendable</source> <translation>Harcanabilir</translation> </message> <message> <source>0</source> <translation>0</translation> </message> <message> <source>Select/Deselect All</source> <translation>Seç/Çıkar Hepsini</translation> </message> <message> <source>Select zMXT Denominations to Spend</source> <translation>Harcanacak zMXT Bölümlerini Seçin</translation> </message> </context> <context> <name>pivx-core</name> <message> <source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source> <translation>(1 = tx meta verileri, örneğin hesap sahibi ve ödeme talebi bilgileri, 2 = tx meta verileri bırak)</translation> </message> <message> <source>Allow JSON-RPC connections from specified source. Valid for &lt;ip&gt; are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source> <translation>JSON-RPC bağlantılarını seçilmiş kaynaktan izin ver. Geçerli &lt;ip&gt; sadece tek IP için (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). TBu seçenek defalarca kez belirlenebilir.</translation> </message> <message> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Verilen adrese bağlanın ve onu dinleyin. IPv6 için [host]: port gösterimini kullanın</translation> </message> <message> <source>Bind to given address and whitelist peers connecting to it. 
Use [host]:port notation for IPv6</source> <translation>Verilen adrese ve ona bağlanan beyaz listelere bağlayın. IPv6 için [host]: port gösterimini kullanın</translation> </message> <message> <source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source> <translation>JSON-RPC bağlantılarını dinlemek için verilen adrese bağlanın. IPv6 için [host]: port gösterimini kullanın. Bu seçenek birden çok kez belirtilebilir (varsayılan: tüm arabirimlere bağlanır)</translation> </message> <message> <source>Calculated accumulator checkpoint is not what is recorded by block index</source> <translation>Hesaplanan akümülatör kontrol noktası, blok indeksiyle kaydedilen kontrol noktası değildir.</translation> </message> <message> <source>Cannot obtain a lock on data directory %s. PIVX Core is probably already running.</source> <translation>Veri dizini %s üzerinde bir kilit elde edemiyor. PIVX Core muhtemelen zaten çalışıyor.</translation> </message> <message> <source>Change automatic finalized budget voting behavior. mode=auto: Vote for only exact finalized budget match to my generated budget. (string, default: auto)</source> <translation>Otomatik sonuçlandırılmış bütçe oylama davranışını değiştirin. mode = auto: Yalnızca kesin bütçelendirilmiş bütçeyi, oluşturulan bütçemle eşleştirmek için oy verin. (dize, varsayılan: otomatik)</translation> </message> <message> <source>Continuously rate-limit free transactions to &lt;n&gt;*1000 bytes per minute (default:%u)</source> <translation>Ücretsiz işlemleri belirli aralıklarla sınırla &lt;n&gt;*1000 bytes dakikada (varsayılan: %u)</translation> </message> <message> <source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source> <translation>Sistem varsayılan izinleri ile yeni dosyalar oluştur, 077 i açığa çıkartmak yerine (sadece cüzdan fonksiyonu etkisiz hale getirildiğinde efektiftir.)</translation> </message> <message> <source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source> <translation>Tüm cüzdan işlemlerini sil ve sadece -rescan on startup komutuyla ilgili kısmımları sadece kurtar</translation> </message> <message> <source>Delete all zerocoin spends and mints that have been recorded to the blockchain database and reindex them (0-1, default: %u)</source> <translation>Blockchain'e kaydedilmiş tüm zerocoin ve mintleri sil ve tekrar endeksle (0-1, varsayılan: %u)</translation> </message> <message> <source>Distributed under the MIT software license, see the accompanying file COPYING or &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</source> <translation>MIT yazılım lisansı altında dağıtılmıştır, eşlik eden KOPYALAMA dosyasına bakın veya &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</translation> </message> <message> <source>Enable automatic Zerocoin minting from specific addresses (0-1, default: %u)</source> <translation>Belirli bir adresten otomatik zMXT minting yapılmasını etkinleştir (0-1, varsayılan: %u)</translation> </message> <message> <source>Enable automatic wallet backups triggered after each zMXT minting (0-1, default: %u)</source> <translation>Her zMXT mintleme sonrası otomatik cüzdan yedeklemesini aktifleştir (0-1, varsayılan:%u)</translation> </message> <message> <source>Enable cold staking functionality (0-1, default: %u). 
Disabled if staking=0</source> <translation>Soğuk stakeleme fonksiyonunu etkinleştirin (0-1, varsayılan: %u). Staking=0 ise etkin değildir</translation> </message> <message> <source>Enable or disable staking functionality for MXT inputs (0-1, default: %u)</source> <translation>PIV girişleri için stake fonksiyonunu aç veya kapat (0-1, varsayılan: %u)</translation> </message> <message> <source>Enable or disable staking functionality for zMXT inputs (0-1, default: %u)</source> <translation>zMXT girişleri için stake fonksiyonunu aç veya kapat (0-1, varsayılan: %u)</translation> </message> <message> <source>Enable spork administration functionality with the appropriate private key.</source> <translation>Uygun private key ile Spork yönetim özelliğine izin verin.</translation> </message> <message> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source> <translation>Regresyon test modunu girin, bu bloklar anında çözülebilen özel bir zincir kullanır.</translation> </message> <message> <source>Error: Listening for incoming connections failed (listen returned error %s)</source> <translation>Hata: Gelen bağlantıları dinleme başarısız oldu (dinleme dönmüş hata %s)</translation> </message> <message> <source>Error: The transaction is larger than the maximum allowed transaction size!</source> <translation>Hata: Transfer izin verilen maksimum transfer büyüklüğünden fazla!</translation> </message> <message> <source>Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source> <translation>Hata: Desteklenmeyen argüman-socks bulundu. SOCKS versiyonunu ayarlamak artık mümkün değil, sadece SOCKS5 vekilleri destekleniyor.</translation> </message> <message> <source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source> <translation>Alakalı bir uyarı alındığında komutu çalıştırın veya gerçekten uzun bir çatalı görürüz (cmd'deki %s yerine mesaj gönderilir)</translation> </message> <message> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Cüzdan işlemi değiştiğinde komutu yürütün (cmd'deki %s yerine TxID yazılır)</translation> </message> <message> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>En iyi blok değiştirildiğinde komutu yürütün (cmd'deki %s yerine blok karması verilir)</translation> </message> <message> <source>Fees (in PIV/Kb) smaller than this are considered zero fee for relaying (default: %s)</source> <translation>Bundan daha küçük ücretler (PIV / Kb cinsinden), aktarma için sıfır ücret olarak kabul edilir (varsayılan: %s)</translation> </message> <message> <source>Fees (in PIV/Kb) smaller than this are considered zero fee for transaction creation (default: %s)</source> <translation>Bundan daha küçük ücretler (PIV / Kb cinsinden), işlem yaratmada sıfır ücret olarak kabul edilir (varsayılan: %s)</translation> </message> <message> <source>Flush database activity from memory pool to disk log every &lt;n&gt; megabytes (default: %u)</source> <translation>Bellek havuzundan disk günlüğüne her veritabanı aktivitesini temizle&lt;n&gt; megabytes (default: %u)</translation> </message> <message> <source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source> <translation>Paytxfee belirlenmemişse, işlemlerin ortalama n blokta 
onaylanmaya başlaması için yeterli ücret ekleyin (varsayılan: %u)</translation> </message> <message> <source>In this mode -genproclimit controls how many blocks are generated immediately.</source> <translation>Bu modda, -genproclimit, kaç blok üretileceğini hemen kontrol eder.</translation> </message> <message> <source>Insufficient or insufficient confirmed funds, you might need to wait a few minutes and try again.</source> <translation>Yetersiz veya yetersiz teyit edilmiş para, birkaç dakika bekleyip tekrar deneyebilirsiniz.</translation> </message> <message> <source>Invalid amount for -maxtxfee=&lt;amount&gt;: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source> <translation>Yetersiz miktar için -maxtxfee=&lt;amount&gt;: '%s' (sıkışmış işlemleri önlemek için en az %s'lik minrelay ücreti olmalıdır)</translation> </message> <message> <source>Keep the specified amount available for spending at all times (default: 0)</source> <translation>Belirlenen tutarı her zaman harcamaya hazır durumda tut. (varsayılan:0)</translation> </message> <message> <source>Log transaction priority and fee per kB when mining blocks (default: %u)</source> <translation>Blok kazarken işlem önceliğini ve kB başına ücretin logunu tut (varsayılan: %u)</translation> </message> <message> <source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source> <translation>getrawtransaction rpc araması tarafından kullanılan tam işlem endeksini edin (varsayılan: %u)</translation> </message> <message> <source>Maximum average size of an index occurrence in the block spam filter (default: %u)</source> <translation>Blok spam filtresi içinde bir endeks oluşumun ortalama maksimum büyüklüğü (varsayılan: %u)</translation> </message> <message> <source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source> <translation>Bilgi taşıyan işlemlerden mine edilen ve aktarılan maksimum bilgi büyüklüğü (varsayılan: %u)</translation> </message> <message> <source>Maximum size of the list of indexes in the block spam filter (default: %u)</source> <translation>Blok spam filtresi içinde bir endeks listesinin maksimum büyüklüğü (varsayılan: %u)</translation> </message> <message> <source>Maximum total fees to use in a single wallet transaction, setting too low may abort large transactions (default: %s)</source> <translation>Tekil cüzdan işleminde kullanılacak maksimum toplam ücret, çok düşük tutulduğunda büyük transferler iptal edilebilir (varsayılan: %s)</translation> </message> <message> <source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source> <translation>Uyumsuz davranan eşleri tekrar bağlanmaktan saniye cinsinden uzak tutma süresi (varsayılan: %u)</translation> </message> <message> <source>Anonsend uses exact denominated amounts to send funds, you might simply need to anonymize some more coins.</source> <translation>Gizleme özelliği gönderilecek tutarın kesin bölümlemelerini kullanır, sadece daha fazla koin anonimleştirmeniz gerekiyor.</translation> </message> <message> <source>Output debugging information (default: %u, supplying &lt;category&gt; is optional)</source> <translation>Hata ayıklama bilgisini dışarı aktar (varsayılan: %u, sağlamak&lt;category&gt; opsiyoneldir)</translation> </message> <message> <source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source> <translation>Adresler azsa eş adreslerini DNS lookup yoluyla ara (varsayılan: 1 unless 
-connect)</translation> </message> <message> <source>Randomize credentials for every proxy connection. This enables Tor stream isolation (default: %u)</source> <translation>Her proksi bağlantısınıda tanıtma bilgisini rasgeleleştir. Bu tor akış izolasyonuna imkan verir (varsayılan: %u)</translation> </message> <message> <source>Require high priority for relaying free or low-fee transactions (default:%u)</source> <translation>Ücretsiz yada düşük ücretli işlemlerde yüksek öncelik ara (varsayılan:%u)</translation> </message> <message> <source>Send trace/debug info to console instead of debug.log file (default: %u)</source> <translation>Takip/hata ayıklama bilgilerini debug.log dosyası yerine konsola gönder (varsayılan: %u)</translation> </message> <message> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source> <translation>Yüksek öncelik/düşük maliyet işlemlerin maksimum blok büyüklüğünü belirle (varsayılan: %d)</translation> </message> <message> <source>Set the number of included blocks to precompute per cycle. (minimum: %d) (maximum: %d) (default: %d)</source> <translation>Her döngüde eklenen blokların ön işlemeye alınacak sayısını belirle. (minimum: %d) (maksimum: %d) (varsayılan: %d)</translation> </message> <message> <source>Set the number of script verification threads (%u to %d, 0 = auto, &lt;0 = leave that many cores free, default: %d)</source> <translation>Betik onaylama işlem parça sayısını belirle (%u 'den %d 'ye, 0 = otomatik, &lt;0 = işlemci çekirdeklerini bırak, varsayılan: %d)</translation> </message> <message> <source>Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</source> <translation>Koin oluşturma etkinleştirilmiş ise, işlem parça sayısını belirle (-1 = tüm işlemci çekirdekleri, varsayılan: %d)</translation> </message> <message> <source>Show N confirmations for a successfully locked transaction (0-9999, default: %u)</source> <translation>Başarıyla kilitlenmiş işlem için N tane onay göster (0-9999, varsayılan: %u)</translation> </message> <message> <source>Support filtering of blocks and transaction with bloom filters (default: %u)</source> <translation>Bloom filtrelerini kullanarak blok ve işlem filtrelemesini destekle (varsayılan: %u)</translation> </message> <message> <source>The block database contains a block which appears to be from the future. This may be due to your computer's date and time being set incorrectly. Only rebuild the block database if you are sure that your computer's date and time are correct</source> <translation>Blok veritabanı gelecek tarihli bir blok barındırıyor . Bu bilgisayar tarihinizin doğru belirlenmemiş olmasından olabilir. Block veritabanını sadece bilgisayarınızın tarihinde yanlışlık olmadığına emin olduktan sonra tekrar oluştun. </translation> </message> <message> <source>This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit &lt;https://www.openssl.org/&gt; and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</source> <translation>Bu ürün OpenSSL Aracında &lt;https://www.openssl.org/&gt; kullanılmak üzere üretilmiş OpenSSL projesi tarafından geliştirmiş yazılım barındırmaktadır. Kriptografik yazılım Eric Young ve UPnP yazılımı Thomas Bernard tarafından geliştirilmiştir.</translation> </message> <message> <source>Total length of network version string (%i) exceeds maximum length (%i). 
Reduce the number or size of uacomments.</source> <translation>Ağ versiyon dizisi toplam uzunluğu(%i) maksimum uzunluğu aşıyor(%i). Yorumların büyüklüğünü veya sayısını azaltın.</translation> </message> <message> <source>Unable to bind to %s on this computer. PIVX Core is probably already running.</source> <translation>Bu bilgisayarda %s 'e bağlanılamıyor. PIVX Core muhtemelen zaten çalışıyor.</translation> </message> <message> <source>Unable to locate enough Anonsend denominated funds for this transaction.</source> <translation>Bu işlem için gereken yeterli bölünmüş gizleme fonu bulunamadı.</translation> </message> <message> <source>Unable to locate enough Anonsend non-denominated funds for this transaction that are not equal 5000 MXT.</source> <translation>5000 MXT'e eşit olmayan bu işlem için gereken yeterli bölünmemiş gizleme fonu bulunamadı.</translation> </message> <message> <source>Unable to locate enough funds for this transaction that are not equal 5000 MXT.</source> <translation>5000 MXT'e eşit olmayan bu işlem için yeterli bakiye tespit edilemedi</translation> </message> <message> <source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source> <translation>Eşlere Tor gizli servisleri üzerinden ulaşmak için ayrı SOCKS5 proksisi kullan (varsayılan: %s)</translation> </message> <message> <source>Warning: -maxtxfee is set very high! Fees this large could be paid on a single transaction.</source> <translation>Uyarı: maxtxfee değeri çok yüksek ayarlanmış! Bu büyüklükte bir ücret ile ödeme yapılabilir.</translation> </message> <message> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Uyarı: paytxfee değeri çok yüksek ayarlanmış! Bu para gönderirken ödeyeceğiniz transfer ücretidir.</translation> </message> <message> <source>Warning: Peers are being disconnected due time differences. Please check that your computer's date and time are correct! If your clock is wrong PIVX Core will not work properly.</source> <translation>Uyarı: Zaman farkı sebebiyle eşlerin bağlantısı koptı. Bilgisayarınızın tarih ve saat ayarlarının doğruluğunu kontrol edin! Saatiniz yanlış ise PIVX Core düzgün çalışmayacaktır. </translation> </message> <message> <source>Warning: Please check that your computer's date and time are correct! If your clock is wrong PIVX Core will not work properly.</source> <translation>Uyarı: Bilgisayarınızın tarih ve saat ayarlarının doğruluğunu kontrol edin! Saatiniz yanlış ise PIVX Core düzgün çalışmayacaktır. </translation> </message> <message> <source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source> <translation>Uyarı: Ağ tamamen onaylamıyor! Bazı eşler sorun yaşıyor olabilir.</translation> </message> <message> <source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Uyarı: Diğer eşlerle anlaşamıyor gibiyiz.! Sizin yada diğer düğümlerin güncelleme yapması gerekiyor. </translation> </message> <message> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Uyarı: wallet.dat dosyası okunurken hata oluştu. Tüm anahtarlar doğru şekilde okundu, fakat işlemler bilgisi veya adres defteri girişleri eksik veya yanlış olabilir.</translation> </message> <message> <source>Warning: wallet.dat corrupt, data salvaged! 
Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Uyarı: wallet.dat dosyası bozuk, bilgi kurtarıldı! Orjinal cüzdan wallet.{timestamp}.bak adıyla %s içine kaydedildiİ bakiyeniz veya işlemler yanlış ise yedekten yükleme yapmalısınız.</translation> </message> <message> <source>Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</source> <translation>Verilen netmask veya IP adresinden gelen bağlantıları güvenli listeye ekle. Birden çok defa belirlenebilir.</translation> </message> <message> <source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</source> <translation>Güvenli liste -whitelist eşleri DoS yasaklı olamaz, önceden mompool içinde olsalar bile işlemleri herzaman iletilir, geçit için faydalı örnek </translation> </message> <message> <source>You must specify a masternodeprivkey in the configuration. Please see documentation for help.</source> <translation>Konfigürasyonda bir masternodeprivkey belirlemelisiniz.  Lütfen dokümantasyona bakınız.</translation> </message> <message> <source>(26210 could be used only on mainnet)</source> <translation>(26210 sadece mainnet üzerinde kullanılabilir)</translation> </message> <message> <source>(default: %s)</source> <translation>(default: %s)</translation> </message> <message> <source>(default: 1)</source> <translation>(default: 1)</translation> </message> <message> <source>(must be 26210 for mainnet)</source> <translation>(mainnet için 26210 olmalıdır)</translation> </message> <message> <source>Accept command line and JSON-RPC commands</source> <translation>JSON-RPC ve komut satırı komutlarını kabul et</translation> </message> <message> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Dış bağlantıları kabul et (varsayılan:1 eğer -proxy veya -connect yoksa)</translation> </message> <message> <source>Accept public REST requests (default: %u)</source> <translation>Açık REST taleplerini kabul et (varsayılan: %u)</translation> </message> <message> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Bağlanmak için bir düğüm ekle ve bağlantıyı açık tut</translation> </message> <message> <source>Adding Wrapped Serials supply...</source> <translation>Paketlenmiş seri temini ekleniyor...</translation> </message> <message> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>DNS lookup ile -addnode, -seednode ve -connect işlemlerine izin ver</translation> </message> <message> <source>Always query for peer addresses via DNS lookup (default: %u)</source> <translation>Eş adreslerini her zaman DNS lookup yoluyla ara (varsayılan: %u)</translation> </message> <message> <source>Append comment to the user agent string</source> <translation>Kullanıcı gereç dizisine yorumu ekle ekle</translation> </message> <message> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Bozuk wallet.dat dosyasından gizli anahtarları kurtar</translation> </message> <message> <source>Automatically create Tor hidden service (default: %d)</source> <translation>Tor gizli servislerini otomatik olarak oluştur (varsayılan: %d)</translation> </message> <message> <source>Block creation options:</source> <translation>Blok oluşturma seçenekleri:</translation> </message> <message> 
<source>Calculating missing accumulators...</source> <translation>Eksik toplayıcılar hesaplanıyor...</translation> </message> <message> <source>Cannot downgrade wallet</source> <translation>Cüzdan alt versiyona düşürülemiyor</translation> </message> <message> <source>Cannot resolve -bind address: '%s'</source> <translation>Bind adres: '%s' çözümlenemiyor</translation> </message> <message> <source>Cannot resolve -externalip address: '%s'</source> <translation>Dış IP adresi: '%s' çözümlenemiyor</translation> </message> <message> <source>Cannot resolve -whitebind address: '%s'</source> <translation>Whitebind address: '%s' çözümlenemiyor</translation> </message> <message> <source>CoinSpend: failed check</source> <translation>KoinHarcama: kontrol başarısız</translation> </message> <message> <source>Connect only to the specified node(s)</source> <translation>Sadece belirtilmiş düğüm(lere) bağlan</translation> </message> <message> <source>Connect through SOCKS5 proxy</source> <translation>SOCKS5 proxy üzerinden bağlan</translation> </message> <message> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Eş adreslerini edinmek için bir düğüme bağlan ve bağlantıyı kopar</translation> </message> <message> <source>Connection options:</source> <translation>Bağlantı seçenekleri:</translation> </message> <message> <source>Copyright (C) 2009-%i The Bitcoin Core Developers</source> <translation>Copyright (C) 2009-%i The Bitcoin Core Developers</translation> </message> <message> <source>Copyright (C) 2014-%i The Dash Core Developers</source> <translation>Copyright (C) 2014-%i The Dash Core Developers</translation> </message> <message> <source>Copyright (C) 2015-%i The PIVX Core Developers</source> <translation>Copyright (C) 2015-%i The PIVX Core Developers</translation> </message> <message> <source>Corrupted block database detected</source> <translation>Bozuk blok veritabanı tespit edildi</translation> </message> <message> <source>Could not parse masternode.conf</source> <translation>masternode.conf çözümlenemedi</translation> </message> <message> <source>Couldn't generate the accumulator witness</source> <translation>Toplayıcı tanığı -accumulator witness - oluşturulamadı</translation> </message> <message> <source>Debugging/Testing options:</source> <translation>Hata Ayıklama/Test seçenekleri:</translation> </message> <message> <source>Delete blockchain folders and resync from scratch</source> <translation>Blok zinciri klasörlerini sil ve sıfırdan senkronize et</translation> </message> <message> <source>Disable OS notifications for incoming transactions (default: %u)</source> <translation>Gelen işlemler için işletim sistemi uyarılarını devre dışı bırak (varsayılan: %u)</translation> </message> <message> <source>Disable safemode, override a real safe mode event (default: %u)</source> <translation>Güvenli modu devre dışı bırak, gerçek bir güvenli mode durumunu üstüne yaz (varsayılan: %u)</translation> </message> <message> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Kendi IP adresini tespit et (varsayılan:1 dinliyorken ve dış IP yokken)</translation> </message> <message> <source>Do not load the wallet and disable wallet RPC calls</source> <translation>Cüzdanı yükleme ve cüzdan RPC aramalarını engelle</translation> </message> <message> <source>Do you want to rebuild the block database now?</source> <translation>Blok veritabanını şimdi tekrar oluşturmak istiyor musunuz?</translation> </message> <message> <source>Done 
loading</source> <translation>Yükleme tamamlandı</translation> </message> <message> <source>Enable automatic Zerocoin minting (0-1, default: %u)</source> <translation>zMXT minting yapılmasını etkinleştir (0-1, varsayılan: %u)</translation> </message> <message> <source>Enable precomputation of zMXT spends and stakes (0-1, default %u)</source> <translation>zMXT harcamalarının ve stakelerinin önhesaplamasını etkinleştir (0-1, varsayılan %u)</translation> </message> <message> <source>Enable publish hash transaction (locked via FastTX) in &lt;address&gt;</source> <translation>&lt;address&gt;içindeki hash transfer yayınlamayı etkinleştir (FastTX ile kilitlenmiş)</translation> </message> <message> <source>Enable publish raw transaction (locked via FastTX) in &lt;address&gt;</source> <translation>&lt;address&gt; içindeki ham işlem yayınlamayı etkinleştir (FastTX ile kilitlenmiş)</translation> </message> <message> <source>Enable the client to act as a masternode (0-1, default: %u)</source> <translation>İstemciyi Masternode olarak hareket etmesi için etkinleştir. (0-1, varsayılan: %u)</translation> </message> <message> <source>Error initializing block database</source> <translation>Blok veritabanı başlatılırken hata oluştu</translation> </message> <message> <source>Error initializing wallet database environment %s!</source> <translation>Cüzdan veritabanı ortamı başlatılırken hata oluştu %s!</translation> </message> <message> <source>Error loading block database</source> <translation>Blok veritabanı yüklenirken hata oluştu</translation> </message> <message> <source>Error loading wallet.dat</source> <translation>Wallet.dat yüklenirken hata oluştu</translation> </message> <message> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Wallet.dat yüklenirken hata oluştu: Cüzdan dosyası bozuk</translation> </message> <message> <source>Error loading wallet.dat: Wallet requires newer version of PIVX Core</source> <translation>Wallet.dat yüklenirken hata oluştu: Cüzdan daha yeni bir PIVX Core versiyonuna ihtiyaç duyuyor</translation> </message> <message> <source>Error opening block database</source> <translation>Block veritabanını açarken hata oluştu</translation> </message> <message> <source>Error reading from database, shutting down.</source> <translation>Veritabanı okunurken hata oluştu, kapatılıyorç</translation> </message> <message> <source>Error writing zerocoinDB to disk</source> <translation>ZerocoinDB diske yazılırken hata oluştu</translation> </message> <message> <source>Error</source> <translation>hata</translation> </message> <message> <source>Error: A fatal internal error occured, see debug.log for details</source> <translation>Hata: Düzeltilemez bir hata oluştu, detaylar için debug.log dosyasına bakınız</translation> </message> <message> <source>Error: Disk space is low!</source> <translation>Hata: Disk alanı az</translation> </message> <message> <source>Error: Unsupported argument -tor found, use -onion.</source> <translation>Hata: Desteklenmeyen argüman -tor bulundu, onion kullanın.</translation> </message> <message> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Hata: Cüzdan kilitli, transfer yaratılamıyor!</translation> </message> <message> <source>Failed to calculate accumulator checkpoint</source> <translation>Toplayıcı kontrol noktası hesaplanamadı</translation> </message> <message> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Port dinleme başarısız oldu. 
Bunu istiyorsanız -listen=0 şeklinde kullanın</translation> </message> <message> <source>Failed to parse host:port string</source> <translation>host:port string çözümlenemedi</translation> </message> <message> <source>Failed to read block</source> <translation>Blok okuma hatası</translation> </message> <message> <source>Fee (in PIV/kB) to add to transactions you send (default: %s)</source> <translation>Gönderdiğiniz transfere (PIV/kb cinsinden) bir masraf ekleyin (varsayılan: %s) </translation> </message> <message> <source>Force safe mode (default: %u)</source> <translation>Güvenli moda zorla (varsayılan: %u)</translation> </message> <message> <source>Generate coins (default: %u)</source> <translation>Koin oluştur (varsayılan: %u)</translation> </message> <message> <source>How many blocks to check at startup (default: %u, 0 = all)</source> <translation>Başlangıçta kaç tane blok kontrol edilsin (varsayılan: %u, 0 = hepsi)</translation> </message> <message> <source>If &lt;category&gt; is not supplied, output all debugging information.</source> <translation>Eğer &lt;category&gt; sağlanamazsa, tüm hata ayıklama bilgisini yaz</translation> </message> <message> <source>Importing...</source> <translation>İçe aktarılıyor....</translation> </message> <message> <source>Imports blocks from external blk000??.dat file</source> <translation>Blok zinciri dizinini mevcut blk000 ?? dat dosyalarından yeniden oluştur.</translation> </message> <message> <source>Include IP addresses in debug output (default: %u)</source> <translation>Hata ayıklama çıktısında IP adreslerini bulundur (varsayılan: %u)</translation> </message> <message> <source>Incorrect or no genesis block found. Wrong datadir for network?</source> <translation>Genesis blok bulunamadı veya yanlış. Ağ için yanlış bilgi dizini?</translation> </message> <message> <source>Information</source> <translation>bilgi</translation> </message> <message> <source>Initialization sanity check failed. PIVX Core is shutting down.</source> <translation>Sağlama kontrolü başlatımı başarısız. 
PIVX Core kapatılıyor.</translation> </message> <message> <source>Insufficient funds</source> <translation>Yetersiz bakiye</translation> </message> <message> <source>Insufficient funds.</source> <translation>Yetersiz bakiye.</translation> </message> <message> <source>Invalid -onion address or hostname: '%s'</source> <translation>Geçersiz onion veya hostname: !%s!</translation> </message> <message> <source>Invalid amount for -maxtxfee=&lt;amount&gt;: '%s'</source> <translation>Maxtxfee=&lt;amount&gt;: '%s' için geçersiz tutar</translation> </message> <message> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: '%s'</source> <translation>Minrelaytxfee=&lt;amount&gt;: '%s' için geçersiz tutar</translation> </message> <message> <source>Invalid amount for -mintxfee=&lt;amount&gt;: '%s'</source> <translation>Mintxfee=&lt;amount&gt;: '%s' için geçersiz tutar</translation> </message> <message> <source>Invalid amount for -paytxfee=&lt;amount&gt;: '%s' (must be at least %s)</source> <translation>Paytxfee=&lt;amount&gt;: '%s' için geçersiz tutar (en az %s olmalıdır)</translation> </message> <message> <source>Invalid amount for -paytxfee=&lt;amount&gt;: '%s'</source> <translation>Paytxfee=&lt;amount&gt;: '%s' için geçersiz tutar</translation> </message> <message> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation>Reservebalance=&lt;amount&gt; için geçersiz tutar</translation> </message> <message> <source>Invalid amount</source> <translation>Geçersiz miktar</translation> </message> <message> <source>Invalid masternodeprivkey. Please see documenation.</source> <translation>Geçersiz masternodeprivkey. Lütfen dokümantasyona bakınız.</translation> </message> <message> <source>Invalid netmask specified in -whitelist: '%s'</source> <translation>Whitelist içinde belirtilen geçersiz netmask: '%s'</translation> </message> <message> <source>Invalid port detected in masternode.conf</source> <translation>masternode.conf dosyasında geçersiz port tespit edildi</translation> </message> <message> <source>Percentage of automatically minted Zerocoin (1-100, default: %u)</source> <translation>Otomatik olarak mint edilen Zerocoin yüzdesi (1-100, varsayılan: %u)</translation> </message> <message> <source>Recalculating MXT supply...</source> <translation>PIV tedariği tekrar hesaplanıyor...</translation> </message> <message> <source>Recalculating minted ZMXT...</source> <translation>Mint edilmiş zMXT'ler tekrar hesaplanıyor...</translation> </message> <message> <source>Recalculating spent ZMXT...</source> <translation>Harcanmış zMXT'ler tekrar hesaplanıyor...</translation> </message> <message> <source>Reindex the MXT and zMXT money supply statistics</source> <translation>PIV ve zMXT para tedarik istatistiklerini tekrar endeksle</translation> </message> <message> <source>Reindexing zerocoin database...</source> <translation>Zerocoin veritabanı tekrar endeksleniyor...</translation> </message> <message> <source>Reindexing zerocoin failed</source> <translation>Zerocoin tekrar endekslemesi başarısız</translation> </message> <message> <source>Selected coins value is less than payment target</source> <translation>Seçili koinlerin değeri hedeflenen ödeme tutarından düşük</translation> </message> <message> <source>Support the zerocoin light node protocol (default: %u)</source> <translation>Zerocoin light node protocolünü destekle (varsayılan: %u)</translation> </message> <message> <source>FastTX options:</source> <translation>FastTX seçenekleri:</translation> </message> <message> <source>This is a 
pre-release test build - use at your own risk - do not use for staking or merchant applications!</source> <translation>Bu ön-sürümdeki bir test programıdır - riski kabullenerek kullanınız - staking ve ticaret uygulamalarında kullanmayın.</translation> </message> <message> <source> mints deleted </source> <translation>mintler silindi </translation> </message> <message> <source> mints updated, </source> <translation> mints güncellendi</translation> </message> <message> <source> unconfirmed transactions removed </source> <translation>Doğrulanmamış işlemler kaldırıldı </translation> </message> <message> <source>Disable all PIVX specific functionality (Masternodes, Zerocoin, FastTX, Budgeting) (0-1, default: %u)</source> <translation>PIVX spesifik tüm fonksiyonaliteyi devre dışı bırak (Masternodes, Zerocoin, FastTX, Budgeting) (0-1, varsayılan: %u)</translation> </message> <message> <source>Enable FastTX, show confirmations for locked transactions (bool, default: %s)</source> <translation>FastTX'i etkinleştir, kilitli işlemler için doğrulamaları göster (bool, varsayılan: %s)</translation> </message> <message> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>İşlem reddedildi! Bu, cüzdanındaki koinlerin bir kısmının harcanması durumunda olabilir; mesela, wallet.dat dosyasının bir kopyasını kullandıysanız ve koinlerinizi harcamış fakat cüzdana harcanmış olarak işaretlenmemişse ortaya çıkabilir.</translation> </message> <message> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Hata: Bu işlem en az %s kadar bir transfer üzreti gerektirir, sebebi tutar, karmaşıklık veya son alınmış tutar kullanımı olabilir!</translation> </message> <message> <source>Error: Unsupported argument -checklevel found. Checklevel must be level 4.</source> <translation>Hata: Desteklenmeyen argüman -checklevel bulundu. Checklevel seviyesi 4 olmalıdır.</translation> </message> <message> <source>Execute command when the best block changes and its size is over (%s in cmd is replaced by block hash, %d with the block size)</source> <translation>En iyi blok değiştirildiğinde ve büyüklüğü blok büyüklüğünü aştığında komutu çalıştırın (%s block hash tarafından cmd içinde değiştiğinde , %d blok büyüklüğü ile)</translation> </message> <message> <source>Failed to find coin set amongst held coins with less than maxNumber of Spends</source> <translation>Eldeki koinlerin içinden harcanmış maksimum sayıdan küçük koin kümesi bulunamadı. </translation> </message> <message> <source>In rare cases, a spend with 7 coins exceeds our maximum allowable transaction size, please retry spend using 6 or less coins</source> <translation>Bazı nadir durumlarda 7 koin bulunan bir harcama maksimum izin verilmiş işlem büyüklüğü sınırını aşar, lütfen 6 veya daha az koin ile tekrar deneyin</translation> </message> <message> <source>Preferred Denomination for automatically minted Zerocoin (1/5/10/50/100/500/1000/5000), 0 for no preference. default: %u)</source> <translation>Otomatik mint edilmiş Zerocoin için tercih edilen bölümler (1/5/10/50/100/500/1000/5000), tercih yoksa 0. varsayılan: %u)</translation> </message> <message> <source>Specify custom backup path to add a copy of any automatic zMXT backup. 
If set as dir, every backup generates a timestamped file. If set as file, will rewrite to that file every backup. If backuppath is set as well, 4 backups will happen</source> <translation>Otomatik bir zMXT yedek kopyası eklemek için bir yedekleme özel yolu belirleyin. Eğer dizin olarak belirlerseniz her yedek zaman damgası adı ile dosyalanır. Dosya olarak belirlerseniz, her yedeklemede üstüne yazar. Eğer yedek yolu da belirlenmişse, 4 yedek oluşur.</translation> </message> <message> <source>Specify custom backup path to add a copy of any wallet backup. If set as dir, every backup generates a timestamped file. If set as file, will rewrite to that file every backup.</source> <translation>Herhangi bir yedek kopyası eklemek için bir yedekleme özel yolu belirleyin. Eğer dizin olarak belirlerseniz her yedek zaman damgası adı ile dosyalanır. Dosya olarak belirlerseniz, her yedeklemede üstüne yazar.</translation> </message> <message> <source>FastTX requires inputs with at least 6 confirmations, you might need to wait a few minutes and try again.</source> <translation>FastTX en az 6 onaya sahip girdilere ihtiyaç duyar, birkaç dakika bekledikten sonra tekrar deneyin.</translation> </message> <message> <source>&lt;category&gt; can be:</source> <translation>&lt;category&gt; olabilir:</translation> </message> <message> <source>Attempt to force blockchain corruption recovery</source> <translation>Bozuk blockchaini kurtarmaya zorlama girişimi</translation> </message> <message> <source>Cannot create public spend input</source> <translation>Açık harcama girişi oluşturulamıyor</translation> </message> <message> <source>CoinSpend: Accumulator witness does not verify</source> <translation>KoinHarcama: Toplayıcı şahidi -Accumulator witness- onaylamıyor</translation> </message> <message> <source>Display the stake modifier calculations in the debug.log file.</source> <translation>Stake değiştirme hesaplamalarını debug.log dosyası içinde göster.</translation> </message> <message> <source>Display verbose coin stake messages in the debug.log file.</source> <translation>Debug.log dosyasının içinde ayrıntılı koin stake mesajlarını göster.</translation> </message> <message> <source>Enable publish hash block in &lt;address&gt;</source> <translation>&lt;address&gt;içindeki hash transfer yayınlamayı etkinleştir </translation> </message> <message> <source>Enable publish hash transaction in &lt;address&gt;</source> <translation>&lt;address&gt;içindeki hash transfer yayınlamayı etkinleştir</translation> </message> <message> <source>Enable publish raw block in &lt;address&gt;</source> <translation>&lt;address&gt; içindeki ham blok yayınlamayı etkinleştir</translation> </message> <message> <source>Enable publish raw transaction in &lt;address&gt;</source> <translation>&lt;address&gt; içindeki ham işlem yayınlamayı etkinleştir </translation> </message> <message> <source>Enable staking functionality (0-1, default: %u)</source> <translation>Stake fonksiyonunu etkinleştirin (0-1, varsayılan: %u)</translation> </message> <message> <source>Error: A fatal internal error occurred, see debug.log for details</source> <translation>Hata: Düzeltilemez bir hata oluştu, detaylar için debug.log dosyasına bakınız</translation> </message> <message> <source>Error: No valid utxo!</source> <translation>Hata: Geçerli utxo yok!</translation> </message> <message> <source>Failed to create mint</source> <translation>Mint oluşturulamadı</translation> </message> <message> <source>Failed to find Zerocoins in wallet.dat</source> <translation>Wallet.dat 
dosyasından Zerocoin bulunamadı.</translation> </message> <message> <source>Failed to parse public spend</source> <translation>Açık harcama çözümlemesi başarısız</translation> </message> <message> <source>Failed to select a zerocoin</source> <translation>Zerocoin seçimi başarısız</translation> </message> <message> <source>Failed to wipe zerocoinDB</source> <translation>ZerocoinDB silinmesi başarısız</translation> </message> <message> <source>Failed to write coin serial number into wallet</source> <translation>Cüzdana koin seri numarası yazımı başarısız</translation> </message> <message> <source>Keep at most &lt;n&gt; unconnectable transactions in memory (default: %u)</source> <translation>Hafızada en fazla &lt;n&gt; bağlanamaz işlem tut (varsayılan: %u)</translation> </message> <message> <source>Limit size of signature cache to &lt;n&gt; entries (default: %u)</source> <translation>İmza ön bellek büyüklüğü &lt;n&gt; girişle limitle (varsayılan: %u)</translation> </message> <message> <source>Line: %d</source> <translation>Satır: %d</translation> </message> <message> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: %u or testnet: %u)</source> <translation>JSON-RPC bağlantılarını dinle &lt;port&gt; (varsayılan: %u or testnet: %u)</translation> </message> <message> <source>Listen for connections on &lt;port&gt; (default: %u or testnet: %u)</source> <translation>Bağlantıalrı dinle &lt;port&gt; (varsayılan: %u or testnet: %u)</translation> </message> <message> <source>Loading addresses...</source> <translation>Adresler Yükleniyor...</translation> </message> <message> <source>Loading block index...</source> <translation>Blok endeksi yükleniyor...</translation> </message> <message> <source>Loading budget cache...</source> <translation>Bütçe ön belleği yükleniyor...</translation> </message> <message> <source>Loading masternode cache...</source> <translation>Masternode ön belleği yükleniyor...</translation> </message> <message> <source>Loading masternode payment cache...</source> <translation>Masternode ödeme ön belleği yükleniyor...</translation> </message> <message> <source>Loading sporks...</source> <translation>Sporklar yükleniyor...</translation> </message> <message> <source>Loading wallet... (%3.2f %%)</source> <translation>Cüzdan yükleniyor... 
(%3.2f%%)</translation> </message> <message> <source>Loading wallet...</source> <translation>Cüzdan Yükleniyor...</translation> </message> <message> <source>Location of the auth cookie (default: data dir)</source> <translation>Yetki çerez lokasyonu (varsayılan: bilgi dizini)</translation> </message> <message> <source>Lock masternodes from masternode configuration file (default: %u)</source> <translation>Masternode'ları masternode konfigürasyon dosyasından kilitle (varsayılan: %u)</translation> </message> <message> <source>Lookup(): Invalid -proxy address or hostname: '%s'</source> <translation>Lookup(): Geçersiz proxy adresi veya hostname: '%s'</translation> </message> <message> <source>Maintain at most &lt;n&gt; connections to peers (default: %u)</source> <translation>Eşlerle en fazla &lt;n&gt; bağlantı kur (varsayılan: %u)</translation> </message> <message> <source>Masternode options:</source> <translation>Masternode seçenekleri:</translation> </message> <message> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: %u)</source> <translation>Bağlantı başına maksimum alım ara belleği, , &lt;n&gt;*1000 bytes (varsayılan: %u)</translation> </message> <message> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: %u)</source> <translation>Bağlantı başına maksimum gönderim ara belleği, &lt;n&gt;*1000 bytes (varsayılan: %u)</translation> </message> <message> <source>Mint did not make it into blockchain</source> <translation>Mint blockchaine yazılamadı</translation> </message> <message> <source>Need to specify a port with -whitebind: '%s'</source> <translation>Whitebind ile belirlenecek bir port gerekiyor: '%s'</translation> </message> <message> <source>Node relay options:</source> <translation>Düğüm aktarım seçenekleri:</translation> </message> <message> <source>Not enough file descriptors available.</source> <translation>Yeterli dosya tanımlayıcısı bulunmuyor.</translation> </message> <message> <source>Number of automatic wallet backups (default: 10)</source> <translation>Otomatik olarak yedeklenmiş cüzdan sayısı</translation> </message> <message> <source>Number of custom location backups to retain (default: %d)</source> <translation>Tutulacak özel lokasyonlu yedek sayısı (varsayılan: %d)</translation> </message> <message> <source>Only accept block chain matching built-in checkpoints (default: %u)</source> <translation>Sadece yerleşik checkpointi olan blockchaini kabul et (varsayılan: %u)</translation> </message> <message> <source>Only connect to nodes in network &lt;net&gt; (ipv4, ipv6 or onion)</source> <translation>Ağ içinde sadece düğümlere bağlan &lt;net&gt; (ipv4, ipv6 veya onion)</translation> </message> <message> <source>Options:</source> <translation>Seçenekler:</translation> </message> <message> <source>Password for JSON-RPC connections</source> <translation>JSON-RPC bağlantıları için şifre</translation> </message> <message> <source>Unable to find transaction containing mint %s</source> <translation>İçinde mint %s bulunan işlem bulunamadı </translation> </message> <message> <source>Unable to find transaction containing mint, txHash: %s</source> <translation>İçinde mint, tcHash %s bulunan işlem bulunamadı</translation> </message> <message> <source>Use block spam filter (default: %u)</source> <translation>Blok spam filtresini kullan (varsayılan: %u)</translation> </message> <message> <source>could not get lock on cs_spendcache</source> <translation>cs_spendcache'e kilitlenilemedi</translation> </message> <message> <source>isValid(): 
Invalid -proxy address or hostname: '%s'</source> <translation>Lookup(): Geçersiz proxy adresi veya hostname: '%s'</translation> </message> <message> <source>Preparing for resync...</source> <translation>Tekrar taramaya hazırlanılıyor...</translation> </message> <message> <source>Need destination or change address because change is not exact</source> <translation>Fark net olmadığı için gönderim adresi veya fark adresi gerekiyor</translation> </message> <message> <source>Prepend debug output with timestamp (default: %u)</source> <translation>Hata ayıklama çıktısının başına tarih damgası ekle (varsayılan: %u)</translation> </message> <message> <source>Print version and exit</source> <translation>Versiyonu bas ve çık</translation> </message> <message> <source>Pubcoin not found in mint tx</source> <translation>Mint tx içinde Pubcoin bulunamadı</translation> </message> <message> <source>RPC server options:</source> <translation>RPC sunucu opsiyonları:</translation> </message> <message> <source>Randomly drop 1 of every &lt;n&gt; network messages</source> <translation>Her &lt;n&gt; ağ mesajından rasgele 1'ine drop işlemi yap</translation> </message> <message> <source>Randomly fuzz 1 of every &lt;n&gt; network messages</source> <translation>Her &lt;n&gt; ağ mesajından rasgele 1'ine fuzz işlemi yap</translation> </message> <message> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Blok zinciri dizinini mevcut blk000 ?? dat dosyalarından yeniden oluştur.</translation> </message> <message> <source>Receive and display P2P network alerts (default: %u)</source> <translation>P2P ağ uyarılarını al ve göster (varsayılan:%u)</translation> </message> <message> <source>Reindex the accumulator database</source> <translation>Toplayıcı veritabanını tekrar endeksle</translation> </message> <message> <source>Relay and mine data carrier transactions (default: %u)</source> <translation>Bilgi taşıyan işlemleri mine et ve aktar (varsayılan: %u)</translation> </message> <message> <source>Relay non-P2SH multisig (default: %u)</source> <translation>P2SH multisig olmayanlarları aktar (varsayılan: %u)</translation> </message> <message> <source>Rescan the block chain for missing wallet transactions</source> <translation>Eksik cüzdan transferleri için blockchaini tekrar tara</translation> </message> <message> <source>Rescanning...</source> <translation>Yeniden taranıyor...</translation> </message> <message> <source>ResetMintZerocoin finished: </source> <translation>ResetMintZerocoin tamamlandı: </translation> </message> <message> <source>ResetSpentZerocoin finished: </source> <translation>ResetSpentZerocoin tamamlandı: </translation> </message> <message> <source>Run a thread to flush wallet periodically (default: %u)</source> <translation>Cüzdanı periyodik olarak düzenlemek için bir işlem çalıştır (varsayılan: %u)</translation> </message> <message> <source>Run in the background as a daemon and accept commands</source> <translation>Arka planda çalış ve komutları kabul et</translation> </message> <message> <source>Send transactions as zero-fee transactions if possible (default: %u)</source> <translation>Mümkünse transferleri sıfır-maliyetli olarak gönder (varsayılan: %u)</translation> </message> <message> <source>Session timed out.</source> <translation>Oturum zaman aşımına uğradı</translation> </message> <message> <source>Set database cache size in megabytes (%d to %d, default: %d)</source> <translation>Veritabanı ön bellek büyüklüğünü megabyte cinsinden belirle (%d 'den %d 'ye, 
varsayılan: %d)</translation> </message> <message> <source>Set external address:port to get to this masternode (example: %s)</source> <translation>Dış adres belirle: bu masternode ulaşmak için port aç (örnek: %s)</translation> </message> <message> <source>Set key pool size to &lt;n&gt; (default: %u)</source> <translation>Anahtar havuz büyüklüğünü belirle &lt;n&gt; (varsayılan: %u)</translation> </message> <message> <source>Set maximum block size in bytes (default: %d)</source> <translation>Maksimum blok büyüklüğünü belirle (varsayılan: %d)</translation> </message> <message> <source>Set minimum block size in bytes (default: %u)</source> <translation>Minimum blok büyüklüğünü belirle (varsayılan: %u)</translation> </message> <message> <source>Set the Maximum reorg depth (default: %u)</source> <translation>Maksimum reorg derinliğini belirle (varsayılan: %u)</translation> </message> <message> <source>Set the masternode private key</source> <translation>Masternode gizli anahtarı</translation> </message> <message> <source>Set the number of threads to service RPC calls (default: %d)</source> <translation>RPC aramalarına hizmet edecek parça sayısını belirle (varsayılan: %d)</translation> </message> <message> <source>Sets the DB_PRIVATE flag in the wallet db environment (default: %u)</source> <translation>Cüzdan ortamında DB_PRIVATE işaretinini belirler (varsayılan: %u)</translation> </message> <message> <source>Show all debugging options (usage: --help -help-debug)</source> <translation>Tüm hata ayıklama seçeneklerini göster (kullanım: -help hata ayıklama yardımı)</translation> </message> <message> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Program başlatılırken debug.log dosyasını sıkıştır. (varsayılan: 1 debug olmadığında)</translation> </message> <message> <source>Signing timed out.</source> <translation>İmzalama zaman aşımına uğradı</translation> </message> <message> <source>Signing transaction failed</source> <translation>İşlem imzalama başarısız</translation> </message> <message> <source>Specify configuration file (default: %s)</source> <translation>Konfigürasyon dosyası belirleyin (varsayılan: %s)</translation> </message> <message> <source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source> <translation>Bağlantı süre dolum değerini milisaniye cinsinden belirtin (minimu:1, varsayılan: %d)</translation> </message> <message> <source>Specify data directory</source> <translation>Bilgi dizini belirleyin</translation> </message> <message> <source>Specify masternode configuration file (default: %s)</source> <translation>Masternode konfigürasyon dosyası belirleyin (varsayılan:%s)</translation> </message> <message> <source>Specify pid file (default: %s)</source> <translation>pid dosyası belirleyin (varsayılan:%s)</translation> </message> <message> <source>Specify wallet file (within data directory)</source> <translation>Cüzdan dosyasını belirleyin (bilgi dizini içinde)</translation> </message> <message> <source>Specify your own public address</source> <translation>Herkese açık adresinizi belirleyin</translation> </message> <message> <source>Spend Valid</source> <translation>Harcama Geçerli</translation> </message> <message> <source>Spend unconfirmed change when sending transactions (default: %u)</source> <translation>Transfer yapılırken onaylanmamış farkları kullan (varsayılan: %u)</translation> </message> <message> <source>Staking options:</source> <translation>Staking seçenekleri:</translation> </message> <message> 
<source>Stop running after importing blocks from disk (default: %u)</source> <translation>Disk üzerinden bloklar içer aktarıldıktan sonra çalışmayı durdur (varsayılan: %u)</translation> </message> <message> <source>Synchronization failed</source> <translation>Eşzamanlama başarısız oldu</translation> </message> <message> <source>Synchronization finished</source> <translation>Eşzamanlama tamamlandı</translation> </message> <message> <source>Synchronization pending...</source> <translation>Eşzamanlama bekleniyor</translation> </message> <message> <source>Synchronizing budgets...</source> <translation>Bütçe eşzamanlanıyor..</translation> </message> <message> <source>Synchronizing masternode winners...</source> <translation>Masternode kazananları eşzamanlanıyor..</translation> </message> <message> <source>Synchronizing masternodes...</source> <translation>Masternode'lar eşzamanlanıyor..</translation> </message> <message> <source>Synchronizing sporks...</source> <translation>Sporklar eşzamanlanıyor..</translation> </message> <message> <source>Syncing zMXT wallet...</source> <translation>zMXT cüzdanı eşzamanlanıyor..</translation> </message> <message> <source>The coin spend has been used</source> <translation>Koin harcaması kullanıldı</translation> </message> <message> <source>The transaction did not verify</source> <translation>Bu ödeme onaylanmadı</translation> </message> <message> <source>This help message</source> <translation>Bu yardım mesajı</translation> </message> <message> <source>This is experimental software.</source> <translation>Bu deneysel bir yazılımdır.</translation> </message> <message> <source>This is intended for regression testing tools and app development.</source> <translation>Regresyon test araçları ve uygulama geliştirme amaçlıdır.</translation> </message> <message> <source>Threshold for disconnecting misbehaving peers (default: %u)</source> <translation>Uyumsuz davranan eşlerden bağı koparma eşiği (varsayılan: %u)</translation> </message> <message> <source>Too many spends needed</source> <translation>Çok fazla harcama gerekiyor</translation> </message> <message> <source>Tor control port password (default: empty)</source> <translation>Tor port kontrol şifresi (varsayılan: boş)</translation> </message> <message> <source>Tor control port to use if onion listening enabled (default: %s)</source> <translation>Onion dinleme aktifse Tor port kontrol kullanımı (varsayılan: %s)</translation> </message> <message> <source>Transaction Created</source> <translation>Ödeme yaratıldı</translation> </message> <message> <source>Transaction Mint Started</source> <translation>Mint işlemi başlatıldı</translation> </message> <message> <source>Transaction amount too small</source> <translation>İşlem miktarı çok düşük</translation> </message> <message> <source>Transaction amounts must be positive</source> <translation>Ödeme tutarı artı bir değer olmalıdır</translation> </message> <message> <source>Transaction too large for fee policy</source> <translation>Masraf kuralları açısından ödeme çok büyük</translation> </message> <message> <source>Transaction too large</source> <translation>Ödeme çok büyük</translation> </message> <message> <source>Trying to spend an already spent serial #, try again.</source> <translation>Önceden harcanmış bir seri # harcanmaya çalışılıyor, tekrar deneyiniz.</translation> </message> <message> <source>Unable to bind to %s on this computer (bind returned error %s)</source> <translation>Bu bilgisayarda %s 'e bağlanılamıyor (bağlantıdan dönen hata %s )</translation> 
</message> <message> <source>Unable to generate initial key</source> <translation>Başlangıç anahtarı oluşturulamadı</translation> </message> <message> <source>Unable to sign spork message, wrong key?</source> <translation>Spork mesajı imzalanamıyor, yanlış anahtar?</translation> </message> <message> <source>Unable to start HTTP server. See debug log for details.</source> <translation>HTTP sunucusu başlatılamıyor. Detaylar için hata ayıklama loguna bakın.</translation> </message> <message> <source>Unknown network specified in -onlynet: '%s'</source> <translation>Onlynet: '%s' içinde belirtilen bilinmeyen ağ</translation> </message> <message> <source>Upgrade wallet to latest format</source> <translation>Cüzdanı son formata güncelleyin</translation> </message> <message> <source>Use UPnP to map the listening port (default: %u)</source> <translation>Dinleme portunu belirlemek için UPnP kullanın (varsayılan: %u)</translation> </message> <message> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Dinleme portunu belirlemek için UPnP kullanın (varsayılan: 1 dinlerken)</translation> </message> <message> <source>Use a custom max chain reorganization depth (default: %u)</source> <translation>Özel bir maksimum zincir re-organizasyon derinliği kullanın (varsayılan: %u)</translation> </message> <message> <source>Use the test network</source> <translation>Test ağını kullanın</translation> </message> <message> <source>User Agent comment (%s) contains unsafe characters.</source> <translation>Kullanıcı aktör yorumu (%s) güvenli olmayan karakterler içermektedir.</translation> </message> <message> <source>Username for JSON-RPC connections</source> <translation>JSON-RPC bağlantısı için kullanıcı adı</translation> </message> <message> <source>Value is below the smallest available denomination (= 1) of zMXT</source> <translation>Aşağıdaki değer zMXT'in mümkün olan en küçük bölüm değeridir (=1)</translation> </message> <message> <source>Verifying blocks...</source> <translation>Bloklar doğrulanıyor...</translation> </message> <message> <source>Verifying wallet...</source> <translation>Cüzdan doğrulanıyor..</translation> </message> <message> <source>Wallet %s resides outside data directory %s</source> <translation>Cüzdan %s bilgi dizini %s dışında bulunuyor</translation> </message> <message> <source>Wallet needed to be rewritten: restart PIVX Core to complete</source> <translation>Cüzdanın tekrar yazılması gerekiyor: tamamlamak için PIVX Core uygulamasını tekrar başlatın.</translation> </message> <message> <source>Wallet options:</source> <translation>Cüzdan seçenekleri:</translation> </message> <message> <source>Wallet window title</source> <translation>Cüzdan pencere başlığı</translation> </message> <message> <source>Warning</source> <translation>Uyarı</translation> </message> <message> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Uyarı: Bu versiyonun süresi geçmiştir, yükseltme zorunludur!</translation> </message> <message> <source>Warning: Unsupported argument -benchmark ignored, use -debug=bench.</source> <translation>Hata: Desteklenmeyen argüman -benchmark yok sayıldı, -debug=bench kullanın.</translation> </message> <message> <source>Warning: Unsupported argument -debugnet ignored, use -debug=net.</source> <translation>Hata: Desteklenmeyen argüman -debugnet yok sayıldı, -debug=net kullanın.</translation> </message> <message> <source>You don't have enough Zerocoins in your wallet</source> <translation>Cüzdanınızda yeterli 
miktarda Zerocoin bulunmuyor</translation> </message> <message> <source>You need to rebuild the database using -reindex to change -txindex</source> <translation>Tx endeksini değiştirmek için veritabanını -reindex kullanarak tekrar oluşturmanız gerekmektedir </translation> </message> <message> <source>Zapping all transactions from wallet...</source> <translation>Cüzdandaki tüm işlemler aranıyor</translation> </message> <message> <source>ZeroMQ notification options:</source> <translation>ZeroMQ bildirim seçenekleri:</translation> </message> <message> <source>Zerocoin options:</source> <translation>Zerocoin seçenekleri:</translation> </message> <message> <source>on startup</source> <translation>Başlangıçta</translation> </message> <message> <source>wallet.dat corrupt, salvage failed</source> <translation>wallte.dat dosyası bozuk, kurtarma başarız</translation> </message> </context> <context> <name>send</name> <message> <source>Form</source> <translation>Form</translation> </message> <message> <source>Send</source> <translation>Gönder</translation> </message> <message> <source>TextLabel</source> <translation>TextLabel</translation> </message> <message> <source>PushButton</source> <translation>Butona Basın</translation> </message> </context> </TS><|fim▁end|>
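The row above is a Qt Linguist `.ts` translation file: each `<message>` element pairs an English `<source>` string with its Turkish `<translation>`. As a rough illustration of how such pairs can be listed for review, here is a minimal sketch in Node.js; the file name `translations_tr.ts` is only a placeholder, the regex approach assumes a well-formed file with plain `<message>` elements, and XML entities such as `&lt;` are left escaped.

```js
// Minimal sketch: print source/translation pairs from a Qt Linguist .ts file.
// Assumes Node.js; the file name is a placeholder, not taken from the row above.
var fs = require('fs');

var xml = fs.readFileSync('translations_tr.ts', 'utf8');

// Crude pattern matching for simple <message> blocks; a real tool would use an XML parser.
var pattern = /<message>[\s\S]*?<source>([\s\S]*?)<\/source>[\s\S]*?<translation[^>]*>([\s\S]*?)<\/translation>/g;

var match;
while ((match = pattern.exec(xml)) !== null) {
    // match[1] is the English source string, match[2] its translation (entities still escaped)
    console.log(match[1] + '  =>  ' + match[2]);
}
```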
<|file_name|>ej.culture.az-Latn-AZ.js<|end_file_name|><|fim▁begin|>ej.addCulture( "az-Latn-AZ", { name: "az-Latn-AZ", englishName: "Azerbaijani (Latin, Azerbaijan)", nativeName: "Azərbaycan dili (Azərbaycan)", language: "az-Latn", numberFormat: { ",": " ", ".": ",", percent: { pattern: ["-n%","n%"], ",": " ", ".": "," }, currency: { pattern: ["-n $","n $"], ",": " ", ".": ",", symbol: "manat" } }, calendars: { standard: { "/": ".", firstDay: 1, days: { names: ["bazar","Bazar ertəsi","çərşənbə axşamı","çərşənbə","Cümə axşamı","Cümə","şənbə"], namesAbbr: ["B","Be","Ça","Ç","Ca","C","Ş"], namesShort: ["B","Be","Ça","Ç","Ca","C","Ş"] }, months: { names: ["yanvar","fevral","mart","aprel","may","iyun","iyul","avgust","sentyabr","oktyabr","noyabr","dekabr",""], namesAbbr: ["Yan","Fev","Mar","Apr","May","İyun","İyul","Avg","Sen","Okt","Noy","Dek",""] }, AM: null, PM: null, patterns: { d: "dd.MM.yyyy", D: "dd MMMM yyyy'-cü il'", t: "HH:mm", T: "HH:mm:ss", f: "dd MMMM yyyy'-cü il' HH:mm", F: "dd MMMM yyyy'-cü il' HH:mm:ss", M: "d MMMM" } }, Hijri: { name: "Hijri", "/": ".", firstDay: 1, days: { names: ["Bazar","Bazar ertəsi","Çərşənbə axşamı","Çərşənbə","Cümə axşamı","Cümə","Şənbə"], namesAbbr: ["B","Be","Ça","Ç","Ca","C","Ş"], namesShort: ["B","Be","Ça","Ç","Ca","C","Ş"] }, months: { names: ["Məhərrəm","Səfər","Rəbiüləvvəl","Rəbiülaxır","Cəmadiyələvvəl","Cəmadiyəlaxır","Rəcəb","Şaban","Ramazan","Şəvval","Zilqədə","Zilhiccə",""], namesAbbr: ["Məhərrəm","Səfər","Rəbiüləvvəl","Rəbiülaxır","Cəmadiyələvvəl","Cəmadiyəlaxır","Rəcəb","Şaban","Ramazan","Şəvval","Zilqədə","Zilhiccə",""] }, AM: null, PM: null, twoDigitYearMax: 1451, patterns: { d: "dd.MM.yyyy", D: "d MMMM yyyy", t: "HH:mm", T: "HH:mm:ss", f: "d MMMM yyyy HH:mm", F: "d MMMM yyyy HH:mm:ss", M: "d MMMM" }, convert: { // Adapted to Script from System.Globalization.HijriCalendar ticks1970: 62135596800000, // number of days leading up to each month monthDays: [0, 30, 59, 89, 118, 148, 177, 207, 236, 266, 295, 325, 355], minDate: -42521673600000, maxDate: 253402300799999, // The number of days to add or subtract from the calendar to accommodate the variances // in the start and the end of Ramadan and to accommodate the date difference between // countries/regions. May be dynamically adjusted based on user preference, but should // remain in the range of -2 to 2, inclusive. hijriAdjustment: 0, toGregorian: function(hyear, hmonth, hday) { var daysSinceJan0101 = this.daysToYear(hyear) + this.monthDays[hmonth] + hday - 1 - this.hijriAdjustment; // 86400000 = ticks per day var gdate = new Date(daysSinceJan0101 * 86400000 - this.ticks1970); // adjust for timezone, because we are interested in the gregorian date for the same timezone // but ticks in javascript is always from GMT, unlike the server were ticks counts from the base // date in the current timezone. gdate.setMinutes(gdate.getMinutes() + gdate.getTimezoneOffset()); return gdate; }, fromGregorian: function(gdate) { if ((gdate < this.minDate) || (gdate > this.maxDate)) return null; var ticks = this.ticks1970 + (gdate-0) - gdate.getTimezoneOffset() * 60000, daysSinceJan0101 = Math.floor(ticks / 86400000) + 1 + this.hijriAdjustment; // very particular formula determined by someone smart, adapted from the server-side implementation. // it approximates the hijri year. var hday, hmonth, hyear = Math.floor(((daysSinceJan0101 - 227013) * 30) / 10631) + 1, absDays = this.daysToYear(hyear), daysInYear = this.isLeapYear(hyear) ? 
355 : 354; // hyear is just approximate, it may need adjustment up or down by 1. if (daysSinceJan0101 < absDays) { hyear--; absDays -= daysInYear; } else if (daysSinceJan0101 === absDays) { hyear--; absDays = this.daysToYear(hyear); } else { if (daysSinceJan0101 > (absDays + daysInYear)) { absDays += daysInYear; hyear++; } }<|fim▁hole|> while (hmonth <= 11 && daysIntoYear > this.monthDays[hmonth]) { hmonth++; } hmonth--; hday = daysIntoYear - this.monthDays[hmonth]; return [hyear, hmonth, hday]; }, daysToYear: function(year) { // calculates how many days since Jan 1, 0001 var yearsToYear30 = Math.floor((year - 1) / 30) * 30, yearsInto30 = year - yearsToYear30 - 1, days = Math.floor((yearsToYear30 * 10631) / 30) + 227013; while (yearsInto30 > 0) { days += (this.isLeapYear(yearsInto30) ? 355 : 354); yearsInto30--; } return days; }, isLeapYear: function(year) { return ((((year * 11) + 14) % 30) < 11); } } } } });<|fim▁end|>
// determine month by looking at how many days into the hyear we are // monthDays contains the number of days up to each month. hmonth = 0; var daysIntoYear = daysSinceJan0101 - absDays;
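For reference, the Hijri conversion in the culture file above uses the tabular Islamic calendar: a repeating 30-year cycle of 10631 days in which 11 years are leap years of 355 days and the rest have 354. The short Python sketch below re-derives the isLeapYear and daysToYear arithmetic purely as an illustration (it is not part of the original file, and it omits the 227013-day offset the JS adds to anchor the count at 1 January 0001):

# Tabular Islamic (Hijri) calendar arithmetic mirroring the JS convert block above.
def is_hijri_leap_year(year):
    # 11 leap years per 30-year cycle: years where (11 * y + 14) mod 30 < 11.
    return ((year * 11) + 14) % 30 < 11

def days_to_hijri_year(year):
    # Days from the Hijri epoch to the start of `year`.
    # Each complete 30-year cycle contributes 10631 days (19 * 354 + 11 * 355).
    cycles, years_into_cycle = divmod(year - 1, 30)
    days = cycles * 10631
    for y in range(1, years_into_cycle + 1):
        days += 355 if is_hijri_leap_year(y) else 354
    return days

# Sanity check: one full 30-year cycle really is 10631 days.
assert sum(355 if is_hijri_leap_year(y) else 354 for y in range(1, 31)) == 10631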
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use error::ErrorResponse; /// Result type for a ClientRequest. pub type ClientResult<T> = Result<T, ClientError>; /// Errors occurring as the result of a ClientRequest. #[derive(Debug, Clone, PartialEq, Eq)] pub enum ClientError { /// Request timeout reached. MaxTimeout, /// Request length exceeded the packet length. MaxLength, /// Client shut down the request client. ClientShutdown, /// Server sent us an invalid message. ServerError, /// Requested to send from IPv4 to IPv6 or vice versa. IPVersionMismatch, /// Server returned an error message. ServerMessage(ErrorResponse<'static>),<|fim▁hole|>}<|fim▁end|>
<|file_name|>Text.js<|end_file_name|><|fim▁begin|>var PixiText = require('../../lib/pixi/src/core/text/Text'), utils = require('../core/utils'), math = require('../../lib/pixi/src/core/math'), Sprite = require('../display/Sprite'), CONST = require('../core/const'); function Text(text, style, resolution){ this._init(text, style, resolution); } Text.prototype = Object.create(PixiText.prototype); Text.prototype.constructor = Text; Text.fontPropertiesCache = {}; Text.fontPropertiesCanvas = document.createElement('canvas'); Text.fontPropertiesContext = Text.fontPropertiesCanvas.getContext('2d'); Text.prototype._init = function(text, style, resolution){ text = text || ' '; PixiText.call(this, text, style, resolution); this.speed = new math.Point(); this.anchor = new math.Point(0.5, 0.5); this.pivot = new math.Point(0.5, 0.5); }; Text.prototype.displayObjectUpdateTransform = function(){ // create some matrix refs for easy access var pt = this.parent.worldTransform; var wt = this.worldTransform; //anchor, pivot, and flip variables var sx = (this.flipX) ? -this.scale.x : this.scale.x, sy = (this.flipY) ? -this.scale.y : this.scale.y, ax = (this.flipX) ? 1-this.anchor.x : this.anchor.x, ay = (this.flipY) ? 1-this.anchor.y : this.anchor.y, px = (this.flipX) ? 1-this.pivot.x : this.pivot.x, py = (this.flipY) ? 1-this.pivot.y : this.pivot.y; // temporary matrix variables var a, b, c, d, tx, ty; //Avoid use _width or _height when are 0 if(!this._width||!this._height){ this._width = this.width/this.scale.x; this._height = this.height/this.scale.y; } var anchorWidth = ax * this._width * sx, anchorHeight = ay * this._height * sy, pivotWidth = px * this._width * sx, pivotHeight = py * this._height * sy; // so if rotation is between 0 then we can simplify the multiplication process... if (this.rotation % CONST.PI_2) { // check to see if the rotation is the same as the previous render. This means we only need to use sin and cos when rotation actually changes if (this.rotation !== this.rotationCache) { this.rotationCache = this.rotation; this._sr = Math.sin(this.rotation); this._cr = Math.cos(this.rotation); } // get the matrix values of the displayobject based on its transform properties.. a = this._cr * sx; b = this._sr * sx; c = -this._sr * sy; d = this._cr * sy; tx = this.position.x + pivotWidth - anchorWidth; ty = this.position.y + pivotHeight - anchorHeight; if (pivotWidth || pivotHeight) { tx -= pivotWidth * this._cr + pivotHeight * -this._sr; ty -= pivotWidth * this._sr + pivotHeight * this._cr; } // concat the parent matrix with the objects transform. wt.a = a * pt.a + b * pt.c; wt.b = a * pt.b + b * pt.d; wt.c = c * pt.a + d * pt.c; wt.d = c * pt.b + d * pt.d; wt.tx = tx * pt.a + ty * pt.c + pt.tx; wt.ty = tx * pt.b + ty * pt.d + pt.ty; } else { // lets do the fast version as we know there is no rotation.. a = sx; d = sy; tx = this.position.x - anchorWidth; ty = this.position.y - anchorHeight; wt.a = a * pt.a; wt.b = a * pt.b; wt.c = d * pt.c; wt.d = d * pt.d; wt.tx = tx * pt.a + ty * pt.c + pt.tx; wt.ty = tx * pt.b + ty * pt.d + pt.ty; } // multiply the alphas.. this.worldAlpha = this.alpha * this.parent.worldAlpha; // reset the bounds each time this is called! 
this._currentBounds = null; }; Text.prototype._renderCanvas = function (renderer) { if (this.dirty) { // this.resolution = 1//renderer.resolution; this.updateText(); } //Sprite.prototype._renderCanvas.call(this, renderer); this._customRenderCanvas(renderer); }; Text.prototype._customRenderCanvas = function(renderer){ if (this.texture.crop.width <= 0 || this.texture.crop.height <= 0) { return; } if (this.blendMode !== renderer.currentBlendMode) { renderer.currentBlendMode = this.blendMode; renderer.context.globalCompositeOperation = renderer.blendModes[renderer.currentBlendMode]; } // Ignore null sources if (this.texture.valid) { var texture = this._texture, wt = this.worldTransform, dx, dy, width, height; var resolution = texture.baseTexture.resolution / renderer.resolution; renderer.context.globalAlpha = this.worldAlpha; // If smoothingEnabled is supported and we need to change the smoothing property for this texture if (renderer.smoothProperty && renderer.currentScaleMode !== texture.baseTexture.scaleMode) { renderer.currentScaleMode = texture.baseTexture.scaleMode; renderer.context[renderer.smoothProperty] = (renderer.currentScaleMode === CONST.SCALE_MODES.LINEAR); } // If the texture is trimmed we offset by the trim x/y, otherwise we use the frame dimensions if(texture.rotate) { // cheeky rotation! var a = wt.a; var b = wt.b; wt.a = -wt.c; wt.b = -wt.d; wt.c = a; wt.d = b; width = texture.crop.height; //TODO: Width assigned to height??? height = texture.crop.width; dx = (texture.trim) ? texture.trim.y - this.anchor.y * texture.trim.height : this.anchor.y * -texture._frame.height; dy = (texture.trim) ? texture.trim.x - this.anchor.x * texture.trim.width : this.anchor.x * -texture._frame.width; } else { width = texture.crop.width; height = texture.crop.height; dx = (texture.trim) ? texture.trim.x - this.anchor.x * texture.trim.width : this.anchor.x * -texture._frame.width; dy = (texture.trim) ? texture.trim.y - this.anchor.y * texture.trim.height : this.anchor.y * -texture._frame.height; } // Allow for pixel rounding if (renderer.roundPixels) { renderer.context.setTransform( wt.a, wt.b, wt.c, wt.d, (wt.tx * renderer.resolution) | 0, (wt.ty * renderer.resolution) | 0 ); dx = dx | 0; dy = dy | 0; } else { renderer.context.setTransform( wt.a, wt.b, wt.c, wt.d, wt.tx * renderer.resolution, wt.ty * renderer.resolution ); } var anchorWidth = this.anchor.x * this._width/resolution, anchorHeight = this.anchor.y * this._height/resolution; if (this.tint !== 0xFFFFFF) { if (this.cachedTint !== this.tint) { this.cachedTint = this.tint; // TODO clean up caching - how to clean up the caches? 
// TODO: dont works with spritesheets this.tintedTexture = CanvasTinter.getTintedTexture(this, this.tint); } renderer.context.drawImage( this.tintedTexture, 0, 0, width * resolution * renderer.resolution, height * resolution * renderer.resolution, dx / resolution, dy / resolution, width * renderer.resolution, height * renderer.resolution ); } else { //TODO: cuando la resolución del renderer es mayor a 1 los sprites se muestran mal renderer.context.drawImage( texture.baseTexture.source, texture.crop.x * resolution, texture.crop.y * resolution, width * resolution * renderer.resolution, height * resolution * renderer.resolution, dx / resolution + anchorWidth, dy / resolution + anchorHeight, width * renderer.resolution, height * renderer.resolution ); } } }; Text.prototype.renderWebGL = function (renderer) { if (this.dirty) { //this.resolution = 1//renderer.resolution; this.updateText(); } Sprite.prototype.renderWebGL.call(this, renderer); }; Text.prototype.updateText = function (){ var style = this._style; this.context.font = style.font;<|fim▁hole|> // preserve original text var outputText = style.wordWrap ? this.wordWrap(this._text) : this._text; // split text into lines var lines = outputText.split(/(?:\r\n|\r|\n)/); // calculate text width var lineWidths = new Array(lines.length); var maxLineWidth = 0; var fontProperties = this.determineFontProperties(style.font); for (var i = 0; i < lines.length; i++) { var lineWidth = this.context.measureText(lines[i]).width; lineWidths[i] = lineWidth; maxLineWidth = Math.max(maxLineWidth, lineWidth); } var width = maxLineWidth + style.strokeThickness; if (style.dropShadow) { width += style.dropShadowDistance; } this.canvas.width = ( width + this.context.lineWidth ) * this.resolution; // calculate text height var lineHeight = this.style.lineHeight || fontProperties.fontSize + style.strokeThickness; var height = lineHeight * lines.length; if (style.dropShadow) { height += style.dropShadowDistance; } this.canvas.height = ( height + this._style.padding * 2 ) * this.resolution; this.context.scale( this.resolution, this.resolution); if (navigator.isCocoonJS) { this.context.clearRect(0, 0, this.canvas.width, this.canvas.height); } //this.context.fillStyle="#FF0000"; //this.context.fillRect(0, 0, this.canvas.width, this.canvas.height); this.context.font = style.font; this.context.strokeStyle = (typeof style.stroke === "number") ? utils.hex2string(style.stroke) : style.stroke; this.context.lineWidth = style.strokeThickness; this.context.textBaseline = style.textBaseline; this.context.lineJoin = style.lineJoin; this.context.miterLimit = style.miterLimit; var linePositionX; var linePositionY; if (style.dropShadow) { this.context.fillStyle = style.dropShadowColor; var xShadowOffset = Math.cos(style.dropShadowAngle) * style.dropShadowDistance; var yShadowOffset = Math.sin(style.dropShadowAngle) * style.dropShadowDistance; for (i = 0; i < lines.length; i++) { linePositionX = style.strokeThickness / 2; linePositionY = (style.strokeThickness / 2 + i * lineHeight) + fontProperties.ascent; if (style.align === 'right') { linePositionX += maxLineWidth - lineWidths[i]; } else if (style.align === 'center') { linePositionX += (maxLineWidth - lineWidths[i]) / 2; } if (style.fill) { this.context.fillText(lines[i], linePositionX + xShadowOffset, linePositionY + yShadowOffset + this._style.padding); } } } //set canvas text styles this.context.fillStyle = (typeof style.fill === "number") ? 
utils.hex2string(style.fill) : style.fill; //draw lines line by line for (i = 0; i < lines.length; i++) { linePositionX = style.strokeThickness / 2; linePositionY = (style.strokeThickness / 2 + i * lineHeight) + fontProperties.ascent; if (style.align === 'right') { linePositionX += maxLineWidth - lineWidths[i]; } else if (style.align === 'center') { linePositionX += (maxLineWidth - lineWidths[i]) / 2; } if (style.stroke && style.strokeThickness) { this.context.strokeText(lines[i], linePositionX, linePositionY + this._style.padding); } if (style.fill) { this.context.fillText(lines[i], linePositionX, linePositionY + this._style.padding); } } this.updateTexture(); }; Text.prototype.setStyle = function(style){ this.style = style; return this; }; Text.prototype.setText = function(text, keys){ if(keys)text = utils.parseTextKeys(text, keys); this.text = text; return this; }; Text.prototype.setWordWrap = function(value){ if(value === false){ this.style.wordWrap = value; }else{ this.style.wordWrap = true; this.style.wordWrapWidth = value; } this.dirty = true; return this; }; Text.prototype.containsPoint = Sprite.prototype.containsPoint; Text.prototype.getLocalBounds = Sprite.prototype.getLocalBounds; module.exports = Text;<|fim▁end|>
// word wrap
<|file_name|>constants.py<|end_file_name|><|fim▁begin|># This file is part of the Simulation Manager project for VecNet. # For copyright and licensing information about this project, see the # NOTICE.txt and LICENSE.md files in its top-level directory; they are<|fim▁hole|># with this file, You can obtain one at http://mozilla.org/MPL/2.0/. from path import path # Root directory for tests that need to make directories where they can write output files TEST_OUTPUT_ROOT = path(__file__).dirname() / 'output'<|fim▁end|>
# available at https://github.com/vecnet/simulation-manager # # This Source Code Form is subject to the terms of the Mozilla Public # License (MPL), version 2.0. If a copy of the MPL was not distributed
<|file_name|>ncd-dcb.py<|end_file_name|><|fim▁begin|># ncd-dcb.py # # Use Cilibrasi and Vitanyi's Normalized Compression Distance # to cluster a randomly chosen sample of entries from the # Dictionary of Canadian Biography volume 1 # # wjt # http://digitalhistoryhacks.blogspot.com # # 26 jun 2007 import bz2 import random pathstring = 'C:\Documents and Settings\HP_Administrator\My Documents\digital-history-datasets\DCB-txt\DCB-v01-txt' # Function to calculate the NCD of two files def ncd(filex, filey): xbytes = open(filex, 'r').read() ybytes = open(filey, 'r').read() xybytes = xbytes + ybytes cx = bz2.compress(xbytes) cy = bz2.compress(ybytes) cxy = bz2.compress(xybytes) if len(cy) > len(cx): n = (len(cxy) - len(cx)) / float(len(cy)) else: n = (len(cxy) - len(cy)) / float(len(cx)) return n # Randomly select 100 biographies from DCB vol 1 (nos. 34123-34714) volume1 = range(34123, 34714) selection = random.sample(volume1, 100) # For each unique pair, calculate NCD outfile = open('ncd-dcb.txt', 'w') for i in range(0, len(selection)-1): print i for j in selection[i+1:]: fx = pathstring + '\\' + str(selection[i]) + '.txt' fy = pathstring + '\\' + str(j) + '.txt' outfile.write(str(selection[i]) + ", " + str(j) + ", " + str(ncd(fx, fy)) + "\n") <|fim▁hole|> outfile.close()<|fim▁end|>
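The script above applies Cilibrasi and Vitanyi's Normalized Compression Distance, NCD(x, y) = (C(xy) - min(C(x), C(y))) / max(C(x), C(y)), with bzip2 as the compressor C, writing one pairwise distance per output line. A minimal in-memory version of the same measure, shown only as an illustration (the byte strings are made-up sample inputs, not DCB entries):

import bz2

def ncd(x, y):
    # C(.) is approximated by the length of the bzip2-compressed input.
    cx = len(bz2.compress(x))
    cy = len(bz2.compress(y))
    cxy = len(bz2.compress(x + y))
    # Normalize by the larger of the two individual compressed sizes.
    return (cxy - min(cx, cy)) / max(cx, cy)

a = b"the quick brown fox jumps over the lazy dog " * 20
b = b"lorem ipsum dolor sit amet, consectetur adipiscing elit " * 20
print(ncd(a, a))  # close to 0: identical inputs compress well together
print(ncd(a, b))  # closer to 1: unrelated inputs share little structure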
<|file_name|>update_header.py<|end_file_name|><|fim▁begin|># This file is part of Indico. # Copyright (C) 2002 - 2021 CERN # # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. import os import re import subprocess import sys from datetime import date import click import yaml from indico.util.console import cformat # Dictionary listing the files for which to change the header. # The key is the extension of the file (without the dot) and the value is another # dictionary containing two keys: # - 'regex' : A regular expression matching comments in the given file type # - 'format': A dictionary with the comment characters to add to the header. # There must be a `comment_start` inserted before the header, # `comment_middle` inserted at the beginning of each line except the # first and last one, and `comment_end` inserted at the end of the # header. (See the `HEADER` above) SUPPORTED_FILES = { 'py': { 'regex': re.compile(r'((^#|[\r\n]#).*)*'), 'format': {'comment_start': '#', 'comment_middle': '#', 'comment_end': ''}}, 'wsgi': { 'regex': re.compile(r'((^#|[\r\n]#).*)*'), 'format': {'comment_start': '#', 'comment_middle': '#', 'comment_end': ''}}, 'js': { 'regex': re.compile(r'/\*(.|[\r\n])*?\*/|((^//|[\r\n]//).*)*'), 'format': {'comment_start': '//', 'comment_middle': '//', 'comment_end': ''}}, 'jsx': { 'regex': re.compile(r'/\*(.|[\r\n])*?\*/|((^//|[\r\n]//).*)*'), 'format': {'comment_start': '//', 'comment_middle': '//', 'comment_end': ''}}, 'css': { 'regex': re.compile(r'/\*(.|[\r\n])*?\*/'), 'format': {'comment_start': '/*', 'comment_middle': ' *', 'comment_end': ' */'}}, 'scss': { 'regex': re.compile(r'/\*(.|[\r\n])*?\*/|((^//|[\r\n]//).*)*'), 'format': {'comment_start': '//', 'comment_middle': '//', 'comment_end': ''}},<|fim▁hole|># The substring which must be part of a comment block in order for the comment to be updated by the header. SUBSTRING = 'This file is part of' USAGE = ''' Updates all the headers in the supported files ({supported_files}). By default, all the files tracked by git in the current repository are updated to the current year. You can specify a year to update to as well as a file or directory. This will update all the supported files in the scope including those not tracked by git. If the directory does not contain any supported files (or if the file specified is not supported) nothing will be updated. 
'''.format(supported_files=', '.join(SUPPORTED_FILES)).strip() def _walk_to_root(path): """Yield directories starting from the given directory up to the root.""" # Based on code from python-dotenv (BSD-licensed): # https://github.com/theskumar/python-dotenv/blob/e13d957b/src/dotenv/main.py#L245 if os.path.isfile(path): path = os.path.dirname(path) last_dir = None current_dir = os.path.abspath(path) while last_dir != current_dir: yield current_dir parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir)) last_dir, current_dir = current_dir, parent_dir def _get_config(path, end_year): config = {} for dirname in _walk_to_root(path): check_path = os.path.join(dirname, 'headers.yml') if os.path.isfile(check_path): with open(check_path) as f: config.update((k, v) for k, v in yaml.safe_load(f.read()).items() if k not in config) if config.pop('root', False): break if 'start_year' not in config: click.echo('no valid headers.yml files found: start_year missing') sys.exit(1) if 'name' not in config: click.echo('no valid headers.yml files found: name missing') sys.exit(1) if 'header' not in config: click.echo('no valid headers.yml files found: header missing') sys.exit(1) config['end_year'] = end_year return config def gen_header(data): if data['start_year'] == data['end_year']: data['dates'] = data['start_year'] else: data['dates'] = '{} - {}'.format(data['start_year'], data['end_year']) return '\n'.join(line.rstrip() for line in data['header'].format(**data).strip().splitlines()) def _update_header(file_path, config, substring, regex, data, ci): found = False with open(file_path) as file_read: content = orig_content = file_read.read() if not content.strip(): return False shebang_line = None if content.startswith('#!/'): shebang_line, content = content.split('\n', 1) for match in regex.finditer(content): if substring in match.group(): found = True content = content[:match.start()] + gen_header(data | config) + content[match.end():] if shebang_line: content = shebang_line + '\n' + content if content != orig_content: msg = 'Incorrect header in {}' if ci else cformat('%{green!}Updating header of %{blue!}{}') print(msg.format(os.path.relpath(file_path))) if not ci: with open(file_path, 'w') as file_write: file_write.write(content) return True elif not found: msg = 'Missing header in {}' if ci else cformat('%{red!}Missing header%{reset} in %{blue!}{}') print(msg.format(os.path.relpath(file_path))) return True def update_header(file_path, year, ci): config = _get_config(file_path, year) ext = file_path.rsplit('.', 1)[-1] if ext not in SUPPORTED_FILES or not os.path.isfile(file_path): return False if os.path.basename(file_path)[0] == '.': return False return _update_header(file_path, config, SUBSTRING, SUPPORTED_FILES[ext]['regex'], SUPPORTED_FILES[ext]['format'], ci) def blacklisted(root, path, _cache={}): orig_path = path if path not in _cache: _cache[orig_path] = False while (path + os.path.sep).startswith(root): if os.path.exists(os.path.join(path, '.no-headers')): _cache[orig_path] = True break path = os.path.normpath(os.path.join(path, '..')) return _cache[orig_path] @click.command(help=USAGE) @click.option('--ci', is_flag=True, help='Indicate that the script is running during CI and should use a non-zero ' 'exit code unless all headers were already up to date. 
This also prevents ' 'files from actually being updated.') @click.option('--year', '-y', type=click.IntRange(min=1000), default=date.today().year, metavar='YEAR', help='Indicate the target year') @click.option('--path', '-p', type=click.Path(exists=True), help='Restrict updates to a specific file or directory') @click.pass_context def main(ctx, ci, year, path): error = False if path and os.path.isdir(path): if not ci: print(cformat('Updating headers to the year %{yellow!}{year}%{reset} for all the files in ' '%{yellow!}{path}%{reset}...').format(year=year, path=path)) for root, _, filenames in os.walk(path): for filename in filenames: if not blacklisted(path, root): if update_header(os.path.join(root, filename), year, ci): error = True elif path and os.path.isfile(path): if not ci: print(cformat('Updating headers to the year %{yellow!}{year}%{reset} for the file ' '%{yellow!}{file}%{reset}...').format(year=year, file=path)) if update_header(path, year, ci): error = True else: if not ci: print(cformat('Updating headers to the year %{yellow!}{year}%{reset} for all ' 'git-tracked files...').format(year=year)) try: for filepath in subprocess.check_output(['git', 'ls-files'], text=True).splitlines(): filepath = os.path.abspath(filepath) if not blacklisted(os.getcwd(), os.path.dirname(filepath)): if update_header(filepath, year, ci): error = True except subprocess.CalledProcessError: raise click.UsageError(cformat('%{red!}You must be within a git repository to run this script.')) if not error: print(cformat('%{green}\u2705 All headers are up to date')) elif ci: print(cformat('%{red}\u274C Some headers need to be updated or added')) sys.exit(1) else: print(cformat('%{yellow}\U0001F504 Some headers have been updated (or are missing)')) if __name__ == '__main__': main()<|fim▁end|>
}
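For context on the SUPPORTED_FILES table above: each entry pairs a regular expression that captures comment blocks for that file type with the comment characters used when the header is rewritten, and _update_header() replaces whichever matched block contains the 'This file is part of' marker. A stripped-down illustration of that find-and-replace step (toy file content and header, not Indico code):

import re

# The same comment-block pattern the script registers for .py files.
PY_COMMENT_BLOCK = re.compile(r'((^#|[\r\n]#).*)*')

source = (
    "# This file is part of ExampleProject.\n"
    "# Copyright (C) 2020 Example.\n"
    "\n"
    "print('hello')\n"
)
new_header = (
    "# This file is part of ExampleProject.\n"
    "# Copyright (C) 2020 - 2024 Example."
)

for match in PY_COMMENT_BLOCK.finditer(source):
    # Only the comment block carrying the marker substring is rewritten.
    if "This file is part of" in match.group():
        source = source[:match.start()] + new_header + source[match.end():]
        break

print(source)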
<|file_name|>feature-gate-native_link_modifiers_whole_archive.rs<|end_file_name|><|fim▁begin|>#![allow(incomplete_features)] #![feature(native_link_modifiers)] #[link(name = "foo", modifiers = "+whole-archive")] //~^ ERROR: `#[link(modifiers="whole-archive")]` is unstable<|fim▁hole|>fn main() {}<|fim▁end|>
extern "C" {}
<|file_name|>App.tsx<|end_file_name|><|fim▁begin|>import React from "react"; import { HomeScreen } from "./routes"; import { ImpactIndex } from "./data"; export interface AppProps { index: ImpactIndex; } const App = (props: AppProps) => { const [terms, setTerms] = React.useState("T"); const [selected, setSelected] = React.useState<string | null>(null); return ( <div> <HomeScreen index={props.index} terms={terms} setTerms={setTerms} selected={selected} setSelected={setSelected} /> </div> ); }; <|fim▁hole|><|fim▁end|>
export default App;
<|file_name|>flexberry-enum.js<|end_file_name|><|fim▁begin|>/** @module ember-flexberry */ import Ember from 'ember'; const { getOwner } = Ember; import { enumCaptions } from 'ember-flexberry-data/utils/enum-functions'; /** Helper for get array captions of registered enum.<|fim▁hole|> @extends <a href="http://emberjs.com/api/classes/Ember.Helper.html">Ember.Helper</a> @public */ export default Ember.Helper.extend({ compute([enumName]) { let enumInstance = getOwner(this).lookup('enum:' + enumName); return enumCaptions(enumInstance); } });<|fim▁end|>
@class EnumCaptionHelper
<|file_name|>pointerHandler.ts<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import * as dom from 'vs/base/browser/dom'; import * as platform from 'vs/base/common/platform'; import { EventType, Gesture, GestureEvent } from 'vs/base/browser/touch'; import { IDisposable, Disposable } from 'vs/base/common/lifecycle'; import { IPointerHandlerHelper, MouseHandler, createMouseMoveEventMerger } from 'vs/editor/browser/controller/mouseHandler'; import { IMouseTarget } from 'vs/editor/browser/editorBrowser'; import { EditorMouseEvent, EditorPointerEventFactory } from 'vs/editor/browser/editorDom'; import { ViewController } from 'vs/editor/browser/view/viewController'; import { ViewContext } from 'vs/editor/common/view/viewContext'; import { BrowserFeatures } from 'vs/base/browser/canIUse'; interface IThrottledGestureEvent { translationX: number; translationY: number; } function gestureChangeEventMerger(lastEvent: IThrottledGestureEvent | null, currentEvent: MSGestureEvent): IThrottledGestureEvent { const r = { translationY: currentEvent.translationY, translationX: currentEvent.translationX }; if (lastEvent) { r.translationY += lastEvent.translationY; r.translationX += lastEvent.translationX; } return r; } /** * Basically IE10 and IE11 */ class MsPointerHandler extends MouseHandler implements IDisposable { private _lastPointerType: string; private _installGestureHandlerTimeout: number; constructor(context: ViewContext, viewController: ViewController, viewHelper: IPointerHandlerHelper) { super(context, viewController, viewHelper); (this.viewHelper.linesContentDomNode.style as any).msTouchAction = 'none'; (this.viewHelper.linesContentDomNode.style as any).msContentZooming = 'none'; // TODO@Alex -> this expects that the view is added in 100 ms, might not be the case // This handler should be added when the dom node is in the dom tree this._installGestureHandlerTimeout = window.setTimeout(() => { this._installGestureHandlerTimeout = -1; if ((<any>window).MSGesture) { const touchGesture = new MSGesture(); const penGesture = new MSGesture(); touchGesture.target = this.viewHelper.linesContentDomNode; penGesture.target = this.viewHelper.linesContentDomNode; this.viewHelper.linesContentDomNode.addEventListener(<any>'MSPointerDown', (e: MSPointerEvent) => { // Circumvent IE11 breaking change in e.pointerType & TypeScript's stale definitions const pointerType = <any>e.pointerType; if (pointerType === ((<any>e).MSPOINTER_TYPE_MOUSE || 'mouse')) { this._lastPointerType = 'mouse'; return; } else if (pointerType === ((<any>e).MSPOINTER_TYPE_TOUCH || 'touch')) { this._lastPointerType = 'touch'; touchGesture.addPointer(e.pointerId); } else { this._lastPointerType = 'pen'; penGesture.addPointer(e.pointerId); } }); this._register(dom.addDisposableThrottledListener<IThrottledGestureEvent, MSGestureEvent>(this.viewHelper.linesContentDomNode, 'MSGestureChange', (e) => this._onGestureChange(e), gestureChangeEventMerger)); this._register(dom.addDisposableListener(this.viewHelper.linesContentDomNode, 'MSGestureTap', (e) => this._onCaptureGestureTap(e), true)); } }, 100); this._lastPointerType = 'mouse'; } public _onMouseDown(e: EditorMouseEvent): void { if (this._lastPointerType === 'mouse') { 
super._onMouseDown(e); } } private _onCaptureGestureTap(rawEvent: MSGestureEvent): void { const e = new EditorMouseEvent(<MouseEvent><any>rawEvent, this.viewHelper.viewDomNode); const t = this._createMouseTarget(e, false); if (t.position) { this.viewController.moveTo(t.position); } // IE does not want to focus when coming in from the browser's address bar if ((<any>e.browserEvent).fromElement) { e.preventDefault(); this.viewHelper.focusTextArea(); } else { // TODO@Alex -> cancel this is focus is lost setTimeout(() => { this.viewHelper.focusTextArea(); }); } } private _onGestureChange(e: IThrottledGestureEvent): void { this._context.viewLayout.deltaScrollNow(-e.translationX, -e.translationY); } public dispose(): void { window.clearTimeout(this._installGestureHandlerTimeout); super.dispose(); } } /** * Basically Edge but should be modified to handle any pointerEnabled, even without support of MSGesture */ class StandardPointerHandler extends MouseHandler implements IDisposable { private _lastPointerType: string; private _installGestureHandlerTimeout: number; constructor(context: ViewContext, viewController: ViewController, viewHelper: IPointerHandlerHelper) { super(context, viewController, viewHelper); this.viewHelper.linesContentDomNode.style.touchAction = 'none'; // TODO@Alex -> this expects that the view is added in 100 ms, might not be the case // This handler should be added when the dom node is in the dom tree this._installGestureHandlerTimeout = window.setTimeout(() => { this._installGestureHandlerTimeout = -1; // TODO@Alex: replace the usage of MSGesture here with something that works across all browsers if ((<any>window).MSGesture) { const touchGesture = new MSGesture(); const penGesture = new MSGesture(); touchGesture.target = this.viewHelper.linesContentDomNode; penGesture.target = this.viewHelper.linesContentDomNode; this.viewHelper.linesContentDomNode.addEventListener('pointerdown', (e: PointerEvent) => { const pointerType = <any>e.pointerType; if (pointerType === 'mouse') { this._lastPointerType = 'mouse'; return; } else if (pointerType === 'touch') { this._lastPointerType = 'touch'; touchGesture.addPointer(e.pointerId); } else {<|fim▁hole|> penGesture.addPointer(e.pointerId); } }); this._register(dom.addDisposableThrottledListener<IThrottledGestureEvent, MSGestureEvent>(this.viewHelper.linesContentDomNode, 'MSGestureChange', (e) => this._onGestureChange(e), gestureChangeEventMerger)); this._register(dom.addDisposableListener(this.viewHelper.linesContentDomNode, 'MSGestureTap', (e) => this._onCaptureGestureTap(e), true)); } }, 100); this._lastPointerType = 'mouse'; } public _onMouseDown(e: EditorMouseEvent): void { if (this._lastPointerType === 'mouse') { super._onMouseDown(e); } } private _onCaptureGestureTap(rawEvent: MSGestureEvent): void { const e = new EditorMouseEvent(<MouseEvent><any>rawEvent, this.viewHelper.viewDomNode); const t = this._createMouseTarget(e, false); if (t.position) { this.viewController.moveTo(t.position); } // IE does not want to focus when coming in from the browser's address bar if ((<any>e.browserEvent).fromElement) { e.preventDefault(); this.viewHelper.focusTextArea(); } else { // TODO@Alex -> cancel this is focus is lost setTimeout(() => { this.viewHelper.focusTextArea(); }); } } private _onGestureChange(e: IThrottledGestureEvent): void { this._context.viewLayout.deltaScrollNow(-e.translationX, -e.translationY); } public dispose(): void { window.clearTimeout(this._installGestureHandlerTimeout); super.dispose(); } } /** * Currently only tested on 
iOS 13/ iPadOS. */ export class PointerEventHandler extends MouseHandler { private _lastPointerType: string; constructor(context: ViewContext, viewController: ViewController, viewHelper: IPointerHandlerHelper) { super(context, viewController, viewHelper); this._register(Gesture.addTarget(this.viewHelper.linesContentDomNode)); this._register(dom.addDisposableListener(this.viewHelper.linesContentDomNode, EventType.Tap, (e) => this.onTap(e))); this._register(dom.addDisposableListener(this.viewHelper.linesContentDomNode, EventType.Change, (e) => this.onChange(e))); this._register(dom.addDisposableListener(this.viewHelper.linesContentDomNode, EventType.Contextmenu, (e: MouseEvent) => this._onContextMenu(new EditorMouseEvent(e, this.viewHelper.viewDomNode), false))); this._lastPointerType = 'mouse'; this._register(dom.addDisposableListener(this.viewHelper.linesContentDomNode, 'pointerdown', (e: any) => { const pointerType = <any>e.pointerType; if (pointerType === 'mouse') { this._lastPointerType = 'mouse'; return; } else if (pointerType === 'touch') { this._lastPointerType = 'touch'; } else { this._lastPointerType = 'pen'; } })); // PonterEvents const pointerEvents = new EditorPointerEventFactory(this.viewHelper.viewDomNode); this._register(pointerEvents.onPointerMoveThrottled(this.viewHelper.viewDomNode, (e) => this._onMouseMove(e), createMouseMoveEventMerger(this.mouseTargetFactory), MouseHandler.MOUSE_MOVE_MINIMUM_TIME)); this._register(pointerEvents.onPointerUp(this.viewHelper.viewDomNode, (e) => this._onMouseUp(e))); this._register(pointerEvents.onPointerLeave(this.viewHelper.viewDomNode, (e) => this._onMouseLeave(e))); this._register(pointerEvents.onPointerDown(this.viewHelper.viewDomNode, (e) => this._onMouseDown(e))); } private onTap(event: GestureEvent): void { if (!event.initialTarget || !this.viewHelper.linesContentDomNode.contains(<any>event.initialTarget)) { return; } event.preventDefault(); this.viewHelper.focusTextArea(); const target = this._createMouseTarget(new EditorMouseEvent(event, this.viewHelper.viewDomNode), false); if (target.position) { // this.viewController.moveTo(target.position); this.viewController.dispatchMouse({ position: target.position, mouseColumn: target.position.column, startedOnLineNumbers: false, mouseDownCount: event.tapCount, inSelectionMode: false, altKey: false, ctrlKey: false, metaKey: false, shiftKey: false, leftButton: false, middleButton: false, }); } } private onChange(e: GestureEvent): void { if (this._lastPointerType === 'touch') { this._context.viewLayout.deltaScrollNow(-e.translationX, -e.translationY); } } public _onMouseDown(e: EditorMouseEvent): void { if (e.target && this.viewHelper.linesContentDomNode.contains(e.target) && this._lastPointerType === 'touch') { return; } super._onMouseDown(e); } } class TouchHandler extends MouseHandler { constructor(context: ViewContext, viewController: ViewController, viewHelper: IPointerHandlerHelper) { super(context, viewController, viewHelper); this._register(Gesture.addTarget(this.viewHelper.linesContentDomNode)); this._register(dom.addDisposableListener(this.viewHelper.linesContentDomNode, EventType.Tap, (e) => this.onTap(e))); this._register(dom.addDisposableListener(this.viewHelper.linesContentDomNode, EventType.Change, (e) => this.onChange(e))); this._register(dom.addDisposableListener(this.viewHelper.linesContentDomNode, EventType.Contextmenu, (e: MouseEvent) => this._onContextMenu(new EditorMouseEvent(e, this.viewHelper.viewDomNode), false))); } private onTap(event: GestureEvent): void { 
event.preventDefault(); this.viewHelper.focusTextArea(); const target = this._createMouseTarget(new EditorMouseEvent(event, this.viewHelper.viewDomNode), false); if (target.position) { this.viewController.moveTo(target.position); } } private onChange(e: GestureEvent): void { this._context.viewLayout.deltaScrollNow(-e.translationX, -e.translationY); } } export class PointerHandler extends Disposable { private readonly handler: MouseHandler; constructor(context: ViewContext, viewController: ViewController, viewHelper: IPointerHandlerHelper) { super(); if (window.navigator.msPointerEnabled) { this.handler = this._register(new MsPointerHandler(context, viewController, viewHelper)); } else if ((platform.isIOS && BrowserFeatures.pointerEvents)) { this.handler = this._register(new PointerEventHandler(context, viewController, viewHelper)); } else if ((<any>window).TouchEvent) { this.handler = this._register(new TouchHandler(context, viewController, viewHelper)); } else if (window.navigator.pointerEnabled || (<any>window).PointerEvent) { this.handler = this._register(new StandardPointerHandler(context, viewController, viewHelper)); } else { this.handler = this._register(new MouseHandler(context, viewController, viewHelper)); } } public getTargetAtClientPoint(clientX: number, clientY: number): IMouseTarget | null { return this.handler.getTargetAtClientPoint(clientX, clientY); } }<|fim▁end|>
this._lastPointerType = 'pen';
<|file_name|>Teacher-Quiz.py<|end_file_name|><|fim▁begin|># Teacher Quiz - Python Code - Elizabeth Tweedale import csv, random<|fim▁hole|> print ("Hello",str(yourName)) return yourName def getQuestions(): # getQuestions reads in the questions from a CSV file questions = [] # this creates an empty list for adding the questions to with open("SuperPythonQuiz.csv", mode="r", encoding="utf-8") as myFile: myQuiz = csv.reader(myFile) for row in myQuiz: questions.append(row) return questions def askQuestion(question,score): # askQuestion prints the question and choices to the screen then checks the answer print(question[0]) # print the question - this is in the [0] position of the row for eachChoice in question[1:-1]: # print each choice from [1] to the last position [-1] print("{0:>5}{1}".format("", eachChoice)) answer = input("Please select an answer: ") # get the student's answer if answer == question[-1]: # check if the answer matches the last position in the question, the correct answer print("Correct!") # if it's correct, tell the user and add one to the score score += 1 else: # if it's incorrect, tell the user what the correct answer was print("Incorrect, the correct answer was {0}.".format(question[-1])) return score # return the score def recordScore(studentName, score): with open("QuizResults.txt", mode="a+",encoding="utf-8") as myFile: # note the '+' sign after the a means if the file does not exist, then create it myFile.write(str(studentName) + "," + str(score) + "\n") # write name,score to the file # "\n" will add a new line to the file so that it's ready for the next name def main(): studentName = askName() # call the askName function questions = getQuestions() # call the getQuestions function score = 0 # initialise the score to 0 number = len(questions) # use the number to keep track of the total number of questions - which is the length of the 'questions' list for eachQuestion in range(number): # reppeat for each question question = random.choice(questions) # choose a random question from the questions list score = askQuestion(question,score) # ask the question and update the score questions.remove(question) # remove the current question from the list so that you don't ask it again print("Your final score is:", score, "out of:", number) # tell the user what their final score is recordScore(studentName, score) # call the recordScore function main()<|fim▁end|>
def askName(): # askName function returns the name of the student print("Welcome to the Super Python Quiz!") yourName = input("What is your name? ")
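Each row read from SuperPythonQuiz.csv is consumed as question text in the first column, the answer choices in the middle columns, and the correct answer in the last column (that is how askQuestion indexes question[0], question[1:-1] and question[-1]). A quick standalone check of that layout, using a made-up sample row rather than the real quiz file:

import csv, io

# Hypothetical row: question, choices..., correct answer in the last column.
sample = "What does CPU stand for?,A) Central Processing Unit,B) Computer Power Unit,C) Central Print Unit,A\n"

for row in csv.reader(io.StringIO(sample)):
    question, choices, answer = row[0], row[1:-1], row[-1]
    print(question)
    for choice in choices:
        print("{0:>5}{1}".format("", choice))
    print("Correct answer:", answer)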
<|file_name|>base.py<|end_file_name|><|fim▁begin|>from django.contrib.messages import constants, get_level, set_level, utils from django.contrib.messages.api import MessageFailure from django.contrib.messages.constants import DEFAULT_LEVELS from django.contrib.messages.storage import base, default_storage from django.contrib.messages.storage.base import Message from django.http import HttpRequest, HttpResponse from django.test import modify_settings, override_settings from django.urls import reverse from django.utils.translation import gettext_lazy def add_level_messages(storage): """ Add 6 messages from different levels (including a custom one) to a storage instance. """ storage.add(constants.INFO, 'A generic info message') storage.add(29, 'Some custom level') storage.add(constants.DEBUG, 'A debugging message', extra_tags='extra-tag') storage.add(constants.WARNING, 'A warning') storage.add(constants.ERROR, 'An error') storage.add(constants.SUCCESS, 'This was a triumph.') class override_settings_tags(override_settings): def enable(self): super().enable() # LEVEL_TAGS is a constant defined in the # django.contrib.messages.storage.base module, so after changing # settings.MESSAGE_TAGS, update that constant also. self.old_level_tags = base.LEVEL_TAGS base.LEVEL_TAGS = utils.get_level_tags() def disable(self): super().disable() base.LEVEL_TAGS = self.old_level_tags class BaseTests: storage_class = default_storage levels = { 'debug': constants.DEBUG, 'info': constants.INFO, 'success': constants.SUCCESS, 'warning': constants.WARNING, 'error': constants.ERROR, } def setUp(self): self.settings_override = override_settings_tags( TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': ( 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ), }, }], ROOT_URLCONF='messages_tests.urls', MESSAGE_TAGS={}, MESSAGE_STORAGE='%s.%s' % (self.storage_class.__module__, self.storage_class.__name__), SESSION_SERIALIZER='django.contrib.sessions.serializers.JSONSerializer', ) self.settings_override.enable() def tearDown(self): self.settings_override.disable() def get_request(self): return HttpRequest() def get_response(self): return HttpResponse() def get_storage(self, data=None): """ Return the storage backend, setting its loaded data to the ``data`` argument. This method avoids the storage ``_get`` method from getting called so that other parts of the storage backend can be tested independent of the message retrieval logic. 
""" storage = self.storage_class(self.get_request()) storage._loaded_data = data or [] return storage def test_repr(self): request = self.get_request() storage = self.storage_class(request) self.assertEqual( repr(storage), f'<{self.storage_class.__qualname__}: request=<HttpRequest>>', ) def test_add(self): storage = self.get_storage() self.assertFalse(storage.added_new) storage.add(constants.INFO, 'Test message 1') self.assertTrue(storage.added_new) storage.add(constants.INFO, 'Test message 2', extra_tags='tag') self.assertEqual(len(storage), 2) def test_add_lazy_translation(self): storage = self.get_storage() response = self.get_response() storage.add(constants.INFO, gettext_lazy('lazy message')) storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 1) def test_no_update(self): storage = self.get_storage() response = self.get_response() storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 0) def test_add_update(self): storage = self.get_storage() response = self.get_response() storage.add(constants.INFO, 'Test message 1') storage.add(constants.INFO, 'Test message 1', extra_tags='tag') storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 2) def test_existing_add_read_update(self): storage = self.get_existing_storage() response = self.get_response() storage.add(constants.INFO, 'Test message 3') list(storage) # Simulates a read storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 0) def test_existing_read_add_update(self): storage = self.get_existing_storage() response = self.get_response() list(storage) # Simulates a read storage.add(constants.INFO, 'Test message 3') storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 1) @override_settings(MESSAGE_LEVEL=constants.DEBUG) def test_full_request_response_cycle(self): """ With the message middleware enabled, messages are properly stored and retrieved across the full request/redirect/response cycle. 
""" data = { 'messages': ['Test message %d' % x for x in range(5)], } show_url = reverse('show_message') for level in ('debug', 'info', 'success', 'warning', 'error'): add_url = reverse('add_message', args=(level,)) response = self.client.post(add_url, data, follow=True) self.assertRedirects(response, show_url) self.assertIn('messages', response.context) messages = [Message(self.levels[level], msg) for msg in data['messages']] self.assertEqual(list(response.context['messages']), messages) for msg in data['messages']: self.assertContains(response, msg) @override_settings(MESSAGE_LEVEL=constants.DEBUG) def test_with_template_response(self): data = { 'messages': ['Test message %d' % x for x in range(5)], } show_url = reverse('show_template_response') for level in self.levels: add_url = reverse('add_template_response', args=(level,)) response = self.client.post(add_url, data, follow=True) self.assertRedirects(response, show_url) self.assertIn('messages', response.context) for msg in data['messages']: self.assertContains(response, msg) # there shouldn't be any messages on second GET request response = self.client.get(show_url) for msg in data['messages']: self.assertNotContains(response, msg) def test_context_processor_message_levels(self): show_url = reverse('show_template_response') response = self.client.get(show_url) self.assertIn('DEFAULT_MESSAGE_LEVELS', response.context) self.assertEqual(response.context['DEFAULT_MESSAGE_LEVELS'], DEFAULT_LEVELS) @override_settings(MESSAGE_LEVEL=constants.DEBUG) def test_multiple_posts(self): """ Messages persist properly when multiple POSTs are made before a GET. """ data = { 'messages': ['Test message %d' % x for x in range(5)], } show_url = reverse('show_message') messages = [] for level in ('debug', 'info', 'success', 'warning', 'error'): messages.extend(Message(self.levels[level], msg) for msg in data['messages']) add_url = reverse('add_message', args=(level,)) self.client.post(add_url, data) response = self.client.get(show_url) self.assertIn('messages', response.context) self.assertEqual(list(response.context['messages']), messages) for msg in data['messages']: self.assertContains(response, msg) @modify_settings( INSTALLED_APPS={'remove': 'django.contrib.messages'}, MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'}, ) @override_settings( MESSAGE_LEVEL=constants.DEBUG, TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, }], ) def test_middleware_disabled(self): """ When the middleware is disabled, an exception is raised when one attempts to store a message. """ data = { 'messages': ['Test message %d' % x for x in range(5)], } reverse('show_message') for level in ('debug', 'info', 'success', 'warning', 'error'): add_url = reverse('add_message', args=(level,)) with self.assertRaises(MessageFailure): self.client.post(add_url, data, follow=True) @modify_settings( INSTALLED_APPS={'remove': 'django.contrib.messages'}, MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'}, ) @override_settings( TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [],<|fim▁hole|> """ When the middleware is disabled, an exception is not raised if 'fail_silently' is True. 
""" data = { 'messages': ['Test message %d' % x for x in range(5)], 'fail_silently': True, } show_url = reverse('show_message') for level in ('debug', 'info', 'success', 'warning', 'error'): add_url = reverse('add_message', args=(level,)) response = self.client.post(add_url, data, follow=True) self.assertRedirects(response, show_url) self.assertNotIn('messages', response.context) def stored_messages_count(self, storage, response): """ Return the number of messages being stored after a ``storage.update()`` call. """ raise NotImplementedError('This method must be set by a subclass.') def test_get(self): raise NotImplementedError('This method must be set by a subclass.') def get_existing_storage(self): return self.get_storage([ Message(constants.INFO, 'Test message 1'), Message(constants.INFO, 'Test message 2', extra_tags='tag'), ]) def test_existing_read(self): """ Reading the existing storage doesn't cause the data to be lost. """ storage = self.get_existing_storage() self.assertFalse(storage.used) # After iterating the storage engine directly, the used flag is set. data = list(storage) self.assertTrue(storage.used) # The data does not disappear because it has been iterated. self.assertEqual(data, list(storage)) def test_existing_add(self): storage = self.get_existing_storage() self.assertFalse(storage.added_new) storage.add(constants.INFO, 'Test message 3') self.assertTrue(storage.added_new) def test_default_level(self): # get_level works even with no storage on the request. request = self.get_request() self.assertEqual(get_level(request), constants.INFO) # get_level returns the default level if it hasn't been set. storage = self.get_storage() request._messages = storage self.assertEqual(get_level(request), constants.INFO) # Only messages of sufficient level get recorded. add_level_messages(storage) self.assertEqual(len(storage), 5) def test_low_level(self): request = self.get_request() storage = self.storage_class(request) request._messages = storage self.assertTrue(set_level(request, 5)) self.assertEqual(get_level(request), 5) add_level_messages(storage) self.assertEqual(len(storage), 6) def test_high_level(self): request = self.get_request() storage = self.storage_class(request) request._messages = storage self.assertTrue(set_level(request, 30)) self.assertEqual(get_level(request), 30) add_level_messages(storage) self.assertEqual(len(storage), 2) @override_settings(MESSAGE_LEVEL=29) def test_settings_level(self): request = self.get_request() storage = self.storage_class(request) self.assertEqual(get_level(request), 29) add_level_messages(storage) self.assertEqual(len(storage), 3) def test_tags(self): storage = self.get_storage() storage.level = 0 add_level_messages(storage) storage.add(constants.INFO, 'A generic info message', extra_tags=None) tags = [msg.tags for msg in storage] self.assertEqual(tags, ['info', '', 'extra-tag debug', 'warning', 'error', 'success', 'info']) def test_level_tag(self): storage = self.get_storage() storage.level = 0 add_level_messages(storage) tags = [msg.level_tag for msg in storage] self.assertEqual(tags, ['info', '', 'debug', 'warning', 'error', 'success']) @override_settings_tags(MESSAGE_TAGS={ constants.INFO: 'info', constants.DEBUG: '', constants.WARNING: '', constants.ERROR: 'bad', 29: 'custom', }) def test_custom_tags(self): storage = self.get_storage() storage.level = 0 add_level_messages(storage) tags = [msg.tags for msg in storage] self.assertEqual(tags, ['info', 'custom', 'extra-tag', '', 'bad', 'success'])<|fim▁end|>
'APP_DIRS': True, }], ) def test_middleware_disabled_fail_silently(self):
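The expected counts in the level tests above (5, 6, 2 and 3 stored messages) follow directly from the numeric levels involved: add_level_messages() records levels 20, 29, 10, 30, 40 and 25, and a storage backend keeps only those at or above its current level. A standalone sketch of that arithmetic, with the level numbers copied from django.contrib.messages.constants:

# Numeric message levels as defined by django.contrib.messages.constants.
DEBUG, INFO, SUCCESS, WARNING, ERROR = 10, 20, 25, 30, 40

# Levels added by add_level_messages(), in order (29 is the custom level).
added = [INFO, 29, DEBUG, WARNING, ERROR, SUCCESS]

def recorded(threshold):
    # A storage backend only records messages at or above its level.
    return [level for level in added if level >= threshold]

print(len(recorded(INFO)))  # 5, as in test_default_level
print(len(recorded(5)))     # 6, as in test_low_level
print(len(recorded(30)))    # 2, as in test_high_level
print(len(recorded(29)))    # 3, as in test_settings_level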
<|file_name|>requirements.py<|end_file_name|><|fim▁begin|>"""Validate requirements.""" from __future__ import annotations from collections import deque import json import operator import os import re import subprocess import sys from awesomeversion import AwesomeVersion, AwesomeVersionStrategy from stdlib_list import stdlib_list from tqdm import tqdm from homeassistant.const import REQUIRED_PYTHON_VER import homeassistant.util.package as pkg_util from script.gen_requirements_all import COMMENT_REQUIREMENTS, normalize_package_name from .model import Config, Integration IGNORE_PACKAGES = { commented.lower().replace("_", "-") for commented in COMMENT_REQUIREMENTS } PACKAGE_REGEX = re.compile( r"^(?:--.+\s)?([-_\.\w\d\[\]]+)(==|>=|<=|~=|!=|<|>|===)*(.*)$" ) PIP_REGEX = re.compile(r"^(--.+\s)?([-_\.\w\d]+.*(?:==|>=|<=|~=|!=|<|>|===)?.*$)") SUPPORTED_PYTHON_TUPLES = [ REQUIRED_PYTHON_VER[:2], tuple(map(operator.add, REQUIRED_PYTHON_VER, (0, 1, 0)))[:2], ] SUPPORTED_PYTHON_VERSIONS = [ ".".join(map(str, version_tuple)) for version_tuple in SUPPORTED_PYTHON_TUPLES ] STD_LIBS = {version: set(stdlib_list(version)) for version in SUPPORTED_PYTHON_VERSIONS} PIPDEPTREE_CACHE = None IGNORE_VIOLATIONS = { # Still has standard library requirements. "acmeda", "blink", "ezviz", "hdmi_cec", "juicenet", "lupusec", "rainbird", "slide", "suez_water", } def validate(integrations: dict[str, Integration], config: Config): """Handle requirements for integrations.""" # Check if we are doing format-only validation. if not config.requirements: for integration in integrations.values(): validate_requirements_format(integration) return ensure_cache() # check for incompatible requirements disable_tqdm = config.specific_integrations or os.environ.get("CI", False) for integration in tqdm(integrations.values(), disable=disable_tqdm): if not integration.manifest: continue validate_requirements(integration) def validate_requirements_format(integration: Integration) -> bool: """Validate requirements format. Returns if valid. """ start_errors = len(integration.errors) for req in integration.requirements: if " " in req: integration.add_error( "requirements", f'Requirement "{req}" contains a space', ) continue pkg, sep, version = PACKAGE_REGEX.match(req).groups() if integration.core and sep != "==": integration.add_error( "requirements", f'Requirement {req} need to be pinned "<pkg name>==<version>".', ) continue if ( version and AwesomeVersion(version).strategy == AwesomeVersionStrategy.UNKNOWN ): integration.add_error( "requirements", f"Unable to parse package version ({version}) for {pkg}.", ) continue return len(integration.errors) == start_errors def validate_requirements(integration: Integration): """Validate requirements.""" if not validate_requirements_format(integration): return # Some integrations have not been fixed yet so are allowed to have violations. 
if integration.domain in IGNORE_VIOLATIONS: return integration_requirements = set() integration_packages = set() for req in integration.requirements: package = normalize_package_name(req) if not package: integration.add_error(<|fim▁hole|> "requirements", f"Failed to normalize package name from requirement {req}", ) return if (package == ign for ign in IGNORE_PACKAGES): continue integration_requirements.add(req) integration_packages.add(package) if integration.disabled: return install_ok = install_requirements(integration, integration_requirements) if not install_ok: return all_integration_requirements = get_requirements(integration, integration_packages) if integration_requirements and not all_integration_requirements: integration.add_error( "requirements", f"Failed to resolve requirements {integration_requirements}", ) return # Check for requirements incompatible with standard library. for version, std_libs in STD_LIBS.items(): for req in all_integration_requirements: if req in std_libs: integration.add_error( "requirements", f"Package {req} is not compatible with Python {version} standard library", ) def ensure_cache(): """Ensure we have a cache of pipdeptree. { "flake8-docstring": { "key": "flake8-docstrings", "package_name": "flake8-docstrings", "installed_version": "1.5.0" "dependencies": {"flake8"} } } """ global PIPDEPTREE_CACHE if PIPDEPTREE_CACHE is not None: return cache = {} for item in json.loads( subprocess.run( ["pipdeptree", "-w", "silence", "--json"], check=True, capture_output=True, text=True, ).stdout ): cache[item["package"]["key"]] = { **item["package"], "dependencies": {dep["key"] for dep in item["dependencies"]}, } PIPDEPTREE_CACHE = cache def get_requirements(integration: Integration, packages: set[str]) -> set[str]: """Return all (recursively) requirements for an integration.""" ensure_cache() all_requirements = set() to_check = deque(packages) while to_check: package = to_check.popleft() if package in all_requirements: continue all_requirements.add(package) item = PIPDEPTREE_CACHE.get(package) if item is None: # Only warn if direct dependencies could not be resolved if package in packages: integration.add_error( "requirements", f"Failed to resolve requirements for {package}" ) continue to_check.extend(item["dependencies"]) return all_requirements def install_requirements(integration: Integration, requirements: set[str]) -> bool: """Install integration requirements. Return True if successful. 
""" global PIPDEPTREE_CACHE ensure_cache() for req in requirements: match = PIP_REGEX.search(req) if not match: integration.add_error( "requirements", f"Failed to parse requirement {req} before installation", ) continue install_args = match.group(1) requirement_arg = match.group(2) is_installed = False normalized = normalize_package_name(requirement_arg) if normalized and "==" in requirement_arg: ver = requirement_arg.split("==")[-1] item = PIPDEPTREE_CACHE.get(normalized) is_installed = item and item["installed_version"] == ver if not is_installed: try: is_installed = pkg_util.is_installed(req) except ValueError: is_installed = False if is_installed: continue args = [sys.executable, "-m", "pip", "install", "--quiet"] if install_args: args.append(install_args) args.append(requirement_arg) try: result = subprocess.run(args, check=True, capture_output=True, text=True) except subprocess.SubprocessError: integration.add_error( "requirements", f"Requirement {req} failed to install", ) else: # Clear the pipdeptree cache if something got installed if "Successfully installed" in result.stdout: PIPDEPTREE_CACHE = None if integration.errors: return False return True<|fim▁end|>
<|file_name|>DrumMachine_Config.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> switch (rowIndex) { case 0: //bass return [0, 10]; case 1: //snare return [4, 12]; case 2: //cymbal return []; case 3: //hihat return [2,7,9,12]; } return []; } export function VolumeSliderOptions(size: number): SliderOptions { return { initPerc: .7, dir: Direction.VERTICAL, knob: { radius: 25, color: 0xFF0000 }, track: { sizeX: 50, sizeY: size, color: 0x00ff00 } } } export function SpeedSliderOptions(size: number): SliderOptions { return { initPerc: .5, dir: Direction.HORIZONTAL, knob: { radius: 25, color: 0xFF0000 }, track: { sizeX: size, sizeY: 50, color: 0x00ff00 } } }<|fim▁end|>
export const sampleIds = ["bass", "snare", "cymbal", "hihat"]; import { Direction, SliderOptions } from "../../../lib/slider/Slider"; export function RowConfig(rowIndex: number): Array<number> {
<|file_name|>AddLivetickerActivity.java<|end_file_name|><|fim▁begin|>package com.sunilson.pro4.activities; import android.app.DatePickerDialog;<|fim▁hole|>import android.content.Intent; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v7.widget.Toolbar; import android.view.View; import android.widget.CompoundButton; import android.widget.DatePicker; import android.widget.EditText; import android.widget.LinearLayout; import android.widget.Switch; import android.widget.TextView; import android.widget.TimePicker; import android.widget.Toast; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.Task; import com.google.firebase.auth.FirebaseAuth; import com.google.firebase.auth.FirebaseUser; import com.google.firebase.database.DataSnapshot; import com.google.firebase.database.DatabaseError; import com.google.firebase.database.DatabaseReference; import com.google.firebase.database.FirebaseDatabase; import com.google.firebase.database.ValueEventListener; import com.sunilson.pro4.R; import com.sunilson.pro4.baseClasses.Liveticker; import com.sunilson.pro4.exceptions.LivetickerSetException; import com.sunilson.pro4.utilities.Constants; import com.sunilson.pro4.views.SubmitButtonBig; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.Locale; import butterknife.BindView; import butterknife.ButterKnife; import butterknife.OnClick; public class AddLivetickerActivity extends BaseActivity implements View.OnClickListener, CompoundButton.OnCheckedChangeListener { private ValueEventListener resultListener; private DatabaseReference mReference, currentResultReference; private boolean finished, startNow; private String privacy = "public"; private String livetickerID; private Calendar calendar; private ArrayList<DatabaseReference> references = new ArrayList<>(); private CompoundButton.OnCheckedChangeListener switchListener; @BindView(R.id.add_liveticker_date) TextView dateTextView; @BindView(R.id.add_liveticker_time) TextView timeTextView; @BindView(R.id.add_liveticker_title_edittext) EditText titleEditText; @BindView(R.id.add_liveticker_description_edittext) EditText descriptionEditText; @BindView(R.id.add_liveticker_status_edittext) EditText statusEditText; @BindView(R.id.add_liveticker_start_switch) Switch dateSwitch; @BindView(R.id.add_liveticker_privacy_switch) Switch privacySwitch; @BindView(R.id.add_liveticker_date_layout) LinearLayout dateLayout; @BindView(R.id.add_liveticker_privacy_title) TextView privacyTitle; @BindView(R.id.submit_button_view) SubmitButtonBig submitButtonBig; @OnClick(R.id.submit_button) public void submit(View view) { final FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); if (user == null || user.isAnonymous()) { Toast.makeText(this, R.string.add_liveticker_user_failure, Toast.LENGTH_SHORT).show(); return; } final Liveticker liveticker = new Liveticker(); try { liveticker.setTitle(titleEditText.getText().toString()); liveticker.setDescription(descriptionEditText.getText().toString()); liveticker.setAuthorID(user.getUid()); liveticker.setStateTimestamp(calendar.getTimeInMillis()); liveticker.setPrivacy(privacy); liveticker.setStatus(statusEditText.getText().toString()); } catch (LivetickerSetException e) { Toast.makeText(this, e.getMessage(), Toast.LENGTH_SHORT).show(); return; } loading(true); final DatabaseReference ref = FirebaseDatabase.getInstance().getReference("/request/" + user.getUid() + 
"/addLiveticker/").push(); ref.setValue(liveticker).addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { //Remove Event Listener from Queue, if it has been started if (currentResultReference != null && resultListener != null) { currentResultReference.removeEventListener(resultListener); } //Listen for results from Queue DatabaseReference taskRef = FirebaseDatabase.getInstance().getReference("/result/" + user.getUid() + "/addLiveticker/" + ref.getKey()); //Add Listener to Reference and store Reference so we can later detach Listener taskRef.addValueEventListener(resultListener); currentResultReference = taskRef; } }); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_add_liveticker); ButterKnife.bind(this); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); getSupportActionBar().setDisplayHomeAsUpEnabled(true); mReference = FirebaseDatabase.getInstance().getReference(); initializeQueueListener(); calendar = Calendar.getInstance(); updateDateTime(); dateTextView.setOnClickListener(this); timeTextView.setOnClickListener(this); dateSwitch.setOnCheckedChangeListener(this); privacySwitch.setOnCheckedChangeListener(this); submitButtonBig.setText(getString(R.string.channel_edit_save), getString(R.string.loading)); } @Override protected void onStop() { super.onStop(); //Remove Event Listener from Queue, if it has been started if (currentResultReference != null && resultListener != null) { currentResultReference.removeEventListener(resultListener); } } @Override protected void authChanged(FirebaseUser user) { if (user.isAnonymous()) { Intent i = new Intent(AddLivetickerActivity.this, MainActivity.class); startActivity(i); Toast.makeText(this, R.string.no_access_permission, Toast.LENGTH_SHORT).show(); } } /** * Initialize Listener for "Add Liveticker Queue" */ private void initializeQueueListener() { resultListener = new ValueEventListener() { @Override public void onDataChange(DataSnapshot dataSnapshot) { if (!finished) { //Check what state the Queue event has if (dataSnapshot.child("state").getValue() != null) { //Liveticker was added successfully if (dataSnapshot.child("state").getValue().toString().equals("success")) { finished = true; Intent i = new Intent(); i.putExtra("livetickerID", dataSnapshot.child("successDetails").getValue().toString()); setResult(Constants.ADD_LIVETICKER_RESULT_CODE, i); finish(); } else if (dataSnapshot.child("state").getValue().toString().equals("error")) { loading(false); Toast.makeText(AddLivetickerActivity.this, dataSnapshot.child("errorDetails").getValue().toString(), Toast.LENGTH_LONG).show(); } } } } @Override public void onCancelled(DatabaseError databaseError) { } }; } /** * Change visual loading state * * @param loading */ private void loading(boolean loading) { if (loading) { submitButtonBig.loading(true); } else { submitButtonBig.loading(false); } } @Override public void onClick(View view) { switch (view.getId()) { case R.id.add_liveticker_date: showDateDialog(); break; case R.id.add_liveticker_time: showTimeDialog(); break; } } /** * A dialog to pick a date and set the calendar to that date */ private void showDateDialog() { DatePickerDialog.OnDateSetListener onDateSetListener = new DatePickerDialog.OnDateSetListener() { @Override public void onDateSet(DatePicker datePicker, int year, int month, int dayOfMonth) { calendar.set(Calendar.MONTH, month); calendar.set(Calendar.YEAR, 
year); calendar.set(Calendar.DAY_OF_MONTH, dayOfMonth); updateDateTime(); } }; DatePickerDialog datePickerDialog = new DatePickerDialog(this, onDateSetListener, calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.get(Calendar.DAY_OF_MONTH)); datePickerDialog.getDatePicker().setMinDate(System.currentTimeMillis() - 1000); datePickerDialog.getDatePicker().setMaxDate(System.currentTimeMillis() + 432000000); datePickerDialog.show(); } /** * A dialog to pick a time and set the calendar to that time */ private void showTimeDialog() { TimePickerDialog.OnTimeSetListener onTimeSetListener = new TimePickerDialog.OnTimeSetListener() { @Override public void onTimeSet(TimePicker timePicker, int hour, int minute) { calendar.set(Calendar.HOUR_OF_DAY, hour); calendar.set(Calendar.MINUTE, minute); updateDateTime(); } }; TimePickerDialog timePickerDialog = new TimePickerDialog(this, onTimeSetListener, calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE), true); timePickerDialog.show(); } /** * Update the Textviews with the current date from the calendar */ private void updateDateTime() { Date date = calendar.getTime(); SimpleDateFormat formatDate = new SimpleDateFormat("dd.MM.yyyy", Locale.getDefault()); SimpleDateFormat formatTime = new SimpleDateFormat("HH:mm", Locale.getDefault()); dateTextView.setText(formatDate.format(date)); timeTextView.setText(formatTime.format(date)); } /** * When a switch gets toggled * * @param compoundButton Switch that was toggled * @param b Value of switch */ @Override public void onCheckedChanged(CompoundButton compoundButton, boolean b) { switch (compoundButton.getId()) { case R.id.add_liveticker_start_switch: startNow = !startNow; if (b) { dateLayout.setVisibility(View.GONE); } else { dateLayout.setVisibility(View.VISIBLE); } break; case R.id.add_liveticker_privacy_switch: if (b) { privacy = "public"; privacyTitle.setText(getString(R.string.add_liveticker_public_title)); } else { privacy = "private"; privacyTitle.setText(getString(R.string.add_liveticker_public_title_private)); } break; } } }<|fim▁end|>
import android.app.TimePickerDialog;
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#coding:utf-8
from django.shortcuts import render,render_to_response,redirect
from django.core.urlresolvers import reverse
from block.models import Blocks
from models import Article
from django.template import RequestContext
from django.contrib import messages
from django.contrib.auth.decorators import login_required

# Create your views here.
def article_list(request,block_id):
    block_id=int(block_id)
    block=Blocks.objects.get(id=block_id)
    articles=Article.objects.filter(block=block).order_by("-last_update_timestamp")
    return render_to_response("articles_list.html",{"articles":articles,"b":block},context_instance=RequestContext(request))

@login_required()
def article_create(request,block_id):<|fim▁hole|>
    block_id=int(block_id)
    block=Blocks.objects.get(id=block_id)
    if request.method=="GET":
        return render_to_response("article_create.html",{"b":block},context_instance=RequestContext(request))
    else:
        title=request.POST['title'].strip()
        content=request.POST['content'].strip()
        if not title or not content:
            messages.add_message(request,messages.ERROR,u'标题和内容不能为空!')
            return render_to_response("article_create.html",{"b":block,"title":title,"content":content},context_instance=RequestContext(request))
        new_article=Article(block=block,title=title,content=content,owner=request.user)
        new_article.save()
        messages.add_message(request,messages.INFO,u'成功发表文章!')
        return redirect(reverse("article_list",args=[block.id,]))

def article_detail(requset,article_id):
    article_id=int(article_id)
    article=Article.objects.get(id=article_id)
    return render_to_response("article_detail.html",{"article":article},context_instance=RequestContext(requset))<|fim▁end|>
<|file_name|>config.js<|end_file_name|><|fim▁begin|>'use strict'

var config = {}

config.facebook = {
  'appID': '261938654297222',
  'appSecret': 'cd8d0bf4ce75ae5e24be29970b79876f',
  'callbackUrl': '/login/facebook/callback/'
}

config.server = {
  'port': process.env.PORT || 3000,
  'env': process.env.NODE_ENV || 'dev',
  'dbUrl': process.env.MONGODB_URI || 'mongodb://localhost:27017/minihero',
  'sessionSecret': 'Minihero FTW!'
}

config.defaultLocation = {<|fim▁hole|>
  latitude: 52.370216,
  longitude: 4.895168
}

config.apiKeys = {
  google: 'AIzaSyA4vKjKRLNIZ829rfFvz9m_-OFhiORB5Q8'
}

module.exports = config<|fim▁end|>
// The default location shown to signed out users on /missions is Amsterdam!
<|file_name|>KmdImages.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """Detect same images and try to merge metadatas before deleting duplicates""" import PIL.Image as Image import PIL.ImageChops as ImageChops import sys import os import pyexiv2<|fim▁hole|>import logging #Handler for merging properties def handler_mergeList(a, b): """List merger""" #FIXME : there is certainly a better python way ! for p in b : if p not in a: a.append(p) return a def handler_minDate(a, b): """Minimum date""" if a < b : return a else : return b def handler_keepMain(a, b): """Keep left""" return a def handler_Exif_Image_Orientation(a, b): """Assert : the higher is better, mainly because 1 is 'no orientation'""" if a > b : return a else : return b def handler_concat(a, b): return (a+b) def handler_Iptc_Application2_ProgramVersion(a, b): try : la = [int(x) for x in a[0].split(".")] lb = [int(x) for x in b[0].split(".")] if la > lb : return [".".join([str(x) for x in la])] else : return [".".join([str(x) for x in lb])] except : if a > b : return a else : return b exiv_changed_keywords = ["merged_kmd"] #Tag set when pictures are merged (if IPTC is in use) #Match exif/iptc properties to do the merge exiv_handlers = { #Keep left (main pictures) "Iptc.Application2.ProgramVersion" : handler_Iptc_Application2_ProgramVersion, "Exif.Image.Software" : handler_keepMain, #Concat "Exif.Photo.UserComment" : handler_concat, #Lists "Iptc.Application2.Keywords" : handler_mergeList, #Orientation "Exif.Image.Orientation" : handler_Exif_Image_Orientation, "Exif.Thumbnail.Orientation" : handler_Exif_Image_Orientation, #Dates "Exif.Image.DateTime" : handler_minDate, "Exif.Photo.DateTimeOriginal" : handler_minDate, } #Don't try to do anything with these properties exiv_ignored_properties = ["Exif.Thumbnail.JPEGInterchangeFormat", "Exif.Image.ExifTag", "Exif.Photo.InteroperabilityTag", "Exif.Photo.MakerNote", "Exif.MakerNote.Offset"] def comparePilImages(img1, img2): """Compare 2 PIL.Images and return True if there is no difference""" try : diff = ImageChops.difference(img1, img2) bbox = diff.getbbox() del(diff) except : return False return bbox == None def compareImagesFiles(f1, f2): """Load two files in PIL, and compare""" img1 = Image.open(f1) img2 = Image.open(f2) return comparePilImages(img1, img2) def compareImagesFolder(folder, quick = False): """Compare images in a folder""" logging.debug("Comparing images in %s", folder) files = [os.path.join(folder, x) for x in os.listdir(folder)] return compareImagesCollection(files, quick) def compareImagesCollection(files, quick = True): imgf = [] samef = [] for fpath in files : if not os.path.isfile(fpath): #Only try to load files ! logging.info("Not a file : %s" % fpath) continue try: Image.open(fpath) imgf.append(fpath) except: logging.info("Not an image : %s" % fpath) pass for f1 in imgf: imgf1 = Image.open(f1) for f2 in imgf: if f2 <= f1 : continue if quick and abs(os.path.getsize(f1) - os.path.getsize(f2)) > 1000 : continue imgf2 = Image.open(f2) if comparePilImages(imgf1, imgf2): logging.debug("Match Images : %s = %s", f1, f2) found = False for fl in samef: #FIXME : Should rewrite this ! 
if f1 in fl and f2 in fl : found = True break if f1 in fl: fl.append(f2) found = True break if f2 in fl: fl.append(f1) found = True break if not found : #Images are the same, but not already matched samef.append([f1,f2]) del(imgf) return(samef) def readExivMetadata(path): try : meta = pyexiv2.ImageMetadata(path) meta.read() return meta except : logging.debug("This file has no exiv metadatas : %s", path) return False def mergeExivMetadata(sameImages, doit = False): #FIXME : should be clevier to choose the mainImage write = False mainI = sameImages[0] metas = {} for pathI in sameImages: meta = pyexiv2.ImageMetadata(pathI) meta.read() metas[pathI] = meta for pathI in sameImages[1:]: logging.debug("Comparing %s and %s", mainI, pathI) for k in metas[pathI].iptc_keys + metas[pathI].exif_keys: if k in exiv_ignored_properties: continue newval = None if k in metas[mainI].iptc_keys + metas[mainI].exif_keys : try : if metas[mainI][k].value != metas[pathI][k].value : logging.debug("Difference for %s", k) logging.debug("%s <> %s", metas[mainI][k].value, metas[pathI][k].value) if k in exiv_handlers : newval = exiv_handlers[k](metas[mainI][k].value, metas[pathI][k].value) logging.info("Merged property %s : %s", k, newval) else : logging.warn("NO HANDLER for %s", k) except : logging.warn("Coulnd't compare %s exif property for %s", k, mainI) else : newval = metas[pathI][k].value logging.info("Imported property %s : %s", k, newval) if newval != None : try : metas[mainI][k] = newval write = True except : logging.warn("Coulnd't setup %s exif property for %s", k, mainI) if write : if "Iptc.Application2.Keywords" in metas[mainI].iptc_keys: metas[mainI]["Iptc.Application2.Keywords"] = handler_mergeList(metas[mainI]["Iptc.Application2.Keywords"].value, exiv_changed_keywords) logging.info("Writing properties to %s", mainI) if doit : metas[mainI].write() for f in sameImages[1:] : logging.info("Removing %s", f) if doit : os.remove(f) for m in metas.keys(): del(metas[m]) del(metas) def cleanDir(folder, doit = False, quick = True): logging.info("Cleaning %s", folder) samef = compareImagesFolder(folder, quick = True) for s in samef : mergeExivMetadata(s, doit) del(samef) for f in os.listdir(folder): p = os.path.join(folder, f) if os.path.isdir(p): logging.debug("Testing %s", p) cleanDir(p, doit, quick = True)<|fim▁end|>
import datetime
<|file_name|>model.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap; use itertools::Itertools; use rustc_serialize::json; use ngrams::ngrams; use errors::DeserializeError; pub struct Model { pub ngram_ranks: HashMap<String, usize>, } impl Model { pub fn build_from_text(text: &str) -> Self { let mut ngram_counts = HashMap::new(); let words = text.split(|ch: char| !ch.is_alphabetic()).filter(|s| !s.is_empty()); for word in words { for n in 1..6 { for ngram in ngrams(word, n) { // If you don't want to unecessarily allocate strings, this is // the only way to do it. This RFC should fix this if it ever // gets accepted: // https://github.com/rust-lang/rfcs/pull/1533 if let Some(count) = ngram_counts.get_mut(ngram) { *count += 1; continue; } ngram_counts.insert(ngram.to_owned(), 1); } } } let ngrams = ngram_counts .into_iter() .sorted_by(|a, b| Ord::cmp(&b.1, &a.1)) .into_iter() .take(300) // Models need to have the same size, or have normalized "differences" .map(|(ngram, _count)| ngram);<|fim▁hole|> } pub fn deserialize(bytes: Vec<u8>) -> Result<Self, DeserializeError> { let string = try!(String::from_utf8(bytes)); let ngram_ranks = try!(json::decode(string.as_str())); let model = Model { ngram_ranks: ngram_ranks }; Ok(model) } pub fn serialize(&self) -> Vec<u8> { json::encode(&self.ngram_ranks).unwrap().into_bytes() } pub fn compare(&self, other: &Model) -> usize { let max_difference = other.ngram_ranks.len(); let mut difference = 0; for (ngram, rank) in &self.ngram_ranks { difference += match other.ngram_ranks.get(ngram) { Some(other_rank) => get_difference(*rank, *other_rank), None => max_difference, } } difference } } fn get_difference(a: usize, b: usize) -> usize { if a > b { a - b } else { b - a } } #[cfg(test)] mod tests { use super::*; #[test] fn serialization_and_deserialization() { let model = Model::build_from_text("Testing text for serialization"); let serialized = model.serialize(); let deserialized = Model::deserialize(serialized).unwrap(); assert_eq!(model.ngram_ranks, deserialized.ngram_ranks); } }<|fim▁end|>
// Nicer way to build a hash map.
Model { ngram_ranks: ngrams.enumerate().map(|(a, b)| (b, a)).collect() }
<|file_name|>dynamic_lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Dynamic library facilities. //! //! A simple wrapper over the platform's dynamic library facilities #![unstable(feature = "std_misc")] #![allow(missing_docs)] use prelude::v1::*; use env; use ffi::{CString, OsString}; use mem; use path::{Path, PathBuf}; pub struct DynamicLibrary { handle: *mut u8 } impl Drop for DynamicLibrary { fn drop(&mut self) { match dl::check_for_errors_in(|| { unsafe { dl::close(self.handle) } }) { Ok(()) => {}, Err(str) => panic!("{}", str) } } } impl DynamicLibrary { /// Lazily open a dynamic library. When passed None it gives a /// handle to the calling process pub fn open(filename: Option<&Path>) -> Result<DynamicLibrary, String> { let maybe_library = dl::open(filename.map(|path| path.as_os_str())); // The dynamic library must not be constructed if there is // an error opening the library so the destructor does not // run. match maybe_library { Err(err) => Err(err), Ok(handle) => Ok(DynamicLibrary { handle: handle }) } } /// Prepends a path to this process's search path for dynamic libraries pub fn prepend_search_path(path: &Path) { let mut search_path = DynamicLibrary::search_path(); search_path.insert(0, path.to_path_buf()); env::set_var(DynamicLibrary::envvar(), &DynamicLibrary::create_path(&search_path)); } /// From a slice of paths, create a new vector which is suitable to be an /// environment variable for this platforms dylib search path. pub fn create_path(path: &[PathBuf]) -> OsString { let mut newvar = OsString::new(); for (i, path) in path.iter().enumerate() { if i > 0 { newvar.push(DynamicLibrary::separator()); } newvar.push(path); } return newvar; } /// Returns the environment variable for this process's dynamic library /// search path pub fn envvar() -> &'static str { if cfg!(windows) { "PATH" } else if cfg!(target_os = "macos") { "DYLD_LIBRARY_PATH" } else { "LD_LIBRARY_PATH" } } fn separator() -> &'static str { if cfg!(windows) { ";" } else { ":" } } /// Returns the current search path for dynamic libraries being used by this /// process pub fn search_path() -> Vec<PathBuf> { match env::var_os(DynamicLibrary::envvar()) { Some(var) => env::split_paths(&var).collect(), None => Vec::new(), } } /// Accesses the value at the symbol of the dynamic library. pub unsafe fn symbol<T>(&self, symbol: &str) -> Result<*mut T, String> { // This function should have a lifetime constraint of 'a on // T but that feature is still unimplemented let raw_string = CString::new(symbol).unwrap(); let maybe_symbol_value = dl::check_for_errors_in(|| { dl::symbol(self.handle, raw_string.as_ptr()) }); // The value must not be constructed if there is an error so // the destructor does not run. 
match maybe_symbol_value { Err(err) => Err(err), Ok(symbol_value) => Ok(mem::transmute(symbol_value)) } } } #[cfg(all(test, not(target_os = "ios")))] mod tests { use super::*; use prelude::v1::*; use libc; use mem; use path::Path; #[test] #[cfg_attr(any(windows, target_os = "android", // FIXME #10379 target_env = "musl"), ignore)] fn test_loading_cosine() { // The math library does not need to be loaded since it is already // statically linked in let libm = match DynamicLibrary::open(None) { Err(error) => panic!("Could not load self as module: {}", error), Ok(libm) => libm }; let cosine: extern fn(libc::c_double) -> libc::c_double = unsafe { match libm.symbol("cos") { Err(error) => panic!("Could not load function cos: {}", error), Ok(cosine) => mem::transmute::<*mut u8, _>(cosine) } }; let argument = 0.0; let expected_result = 1.0; let result = cosine(argument); if result != expected_result { panic!("cos({}) != {} but equaled {} instead", argument, expected_result, result) } } #[test] #[cfg(any(target_os = "linux", target_os = "macos", target_os = "freebsd", target_os = "dragonfly", target_os = "bitrig", target_os = "openbsd"))] fn test_errors_do_not_crash() { // Open /dev/null as a library to get an error, and make sure // that only causes an error, and not a crash. let path = Path::new("/dev/null"); match DynamicLibrary::open(Some(&path)) { Err(_) => {} Ok(_) => panic!("Successfully opened the empty library.") } } } #[cfg(any(target_os = "linux", target_os = "android", target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly", target_os = "bitrig",<|fim▁hole|> use ffi::{CStr, OsStr}; use str; use libc; use ptr; pub fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> { check_for_errors_in(|| { unsafe { match filename { Some(filename) => open_external(filename), None => open_internal(), } } }) } const LAZY: libc::c_int = 1; unsafe fn open_external(filename: &OsStr) -> *mut u8 { let s = filename.to_cstring().unwrap(); dlopen(s.as_ptr(), LAZY) as *mut u8 } unsafe fn open_internal() -> *mut u8 { dlopen(ptr::null(), LAZY) as *mut u8 } pub fn check_for_errors_in<T, F>(f: F) -> Result<T, String> where F: FnOnce() -> T, { use sync::StaticMutex; static LOCK: StaticMutex = StaticMutex::new(); unsafe { // dlerror isn't thread safe, so we need to lock around this entire // sequence let _guard = LOCK.lock(); let _old_error = dlerror(); let result = f(); let last_error = dlerror() as *const _; let ret = if ptr::null() == last_error { Ok(result) } else { let s = CStr::from_ptr(last_error).to_bytes(); Err(str::from_utf8(s).unwrap().to_string()) }; ret } } pub unsafe fn symbol(handle: *mut u8, symbol: *const libc::c_char) -> *mut u8 { dlsym(handle as *mut libc::c_void, symbol) as *mut u8 } pub unsafe fn close(handle: *mut u8) { dlclose(handle as *mut libc::c_void); () } extern { fn dlopen(filename: *const libc::c_char, flag: libc::c_int) -> *mut libc::c_void; fn dlerror() -> *mut libc::c_char; fn dlsym(handle: *mut libc::c_void, symbol: *const libc::c_char) -> *mut libc::c_void; fn dlclose(handle: *mut libc::c_void) -> libc::c_int; } } #[cfg(target_os = "windows")] mod dl { use prelude::v1::*; use ffi::OsStr; use libc; use libc::consts::os::extra::ERROR_CALL_NOT_IMPLEMENTED; use sys::os; use os::windows::prelude::*; use ptr; use sys::c::compat::kernel32::SetThreadErrorMode; pub fn open(filename: Option<&OsStr>) -> Result<*mut u8, String> { // disable "dll load failed" error dialog. 
let mut use_thread_mode = true; let prev_error_mode = unsafe { // SEM_FAILCRITICALERRORS 0x01 let new_error_mode = 1; let mut prev_error_mode = 0; // Windows >= 7 supports thread error mode. let result = SetThreadErrorMode(new_error_mode, &mut prev_error_mode); if result == 0 { let err = os::errno(); if err as libc::c_int == ERROR_CALL_NOT_IMPLEMENTED { use_thread_mode = false; // SetThreadErrorMode not found. use fallback solution: // SetErrorMode() Note that SetErrorMode is process-wide so // this can cause race condition! However, since even // Windows APIs do not care of such problem (#20650), we // just assume SetErrorMode race is not a great deal. prev_error_mode = SetErrorMode(new_error_mode); } } prev_error_mode }; unsafe { SetLastError(0); } let result = match filename { Some(filename) => { let filename_str: Vec<_> = filename.encode_wide().chain(Some(0).into_iter()).collect(); let result = unsafe { LoadLibraryW(filename_str.as_ptr() as *const libc::c_void) }; // beware: Vec/String may change errno during drop! // so we get error here. if result == ptr::null_mut() { let errno = os::errno(); Err(os::error_string(errno)) } else { Ok(result as *mut u8) } } None => { let mut handle = ptr::null_mut(); let succeeded = unsafe { GetModuleHandleExW(0 as libc::DWORD, ptr::null(), &mut handle) }; if succeeded == libc::FALSE { let errno = os::errno(); Err(os::error_string(errno)) } else { Ok(handle as *mut u8) } } }; unsafe { if use_thread_mode { SetThreadErrorMode(prev_error_mode, ptr::null_mut()); } else { SetErrorMode(prev_error_mode); } } result } pub fn check_for_errors_in<T, F>(f: F) -> Result<T, String> where F: FnOnce() -> T, { unsafe { SetLastError(0); let result = f(); let error = os::errno(); if 0 == error { Ok(result) } else { Err(format!("Error code {}", error)) } } } pub unsafe fn symbol(handle: *mut u8, symbol: *const libc::c_char) -> *mut u8 { GetProcAddress(handle as *mut libc::c_void, symbol) as *mut u8 } pub unsafe fn close(handle: *mut u8) { FreeLibrary(handle as *mut libc::c_void); () } #[allow(non_snake_case)] extern "system" { fn SetLastError(error: libc::size_t); fn LoadLibraryW(name: *const libc::c_void) -> *mut libc::c_void; fn GetModuleHandleExW(dwFlags: libc::DWORD, name: *const u16, handle: *mut *mut libc::c_void) -> libc::BOOL; fn GetProcAddress(handle: *mut libc::c_void, name: *const libc::c_char) -> *mut libc::c_void; fn FreeLibrary(handle: *mut libc::c_void); fn SetErrorMode(uMode: libc::c_uint) -> libc::c_uint; } }<|fim▁end|>
target_os = "openbsd"))]
mod dl {
    use prelude::v1::*;
<|file_name|>SelectionsModal.js<|end_file_name|><|fim▁begin|>/* * Copyright 2012 OSBI Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and<|fim▁hole|> /** * Dialog for member selections */ var SelectionsModal = Modal.extend({ type: "selections", paramvalue: null, buttons: [ { text: "OK", method: "save" }, { text: "Open Date Filter", method: "open_date_filter" }, { text: "Cancel", method: "close" } ], events: { 'click a': 'call', 'click .search_term' : 'search_members', 'click .clear_search' : 'clear_search', 'change #show_unique': 'show_unique_action', 'change #use_result': 'use_result_action', 'dblclick .selection_options li.option_value label' : 'click_move_selection', 'click li.all_options' : 'click_all_member_selection', 'change #show_totals': 'show_totals_action' //,'click div.updown_buttons a.form_button': 'updown_selection' }, show_unique_option: false, use_result_option: Settings.MEMBERS_FROM_RESULT, show_totals_option: [], members_limit: Settings.MEMBERS_LIMIT, members_search_limit: Settings.MEMBERS_SEARCH_LIMIT, members_search_server: false, selection_type: "INCLUSION", initialize: function(args) { // Initialize properties var self = this; _.extend(this, args); this.options.title = "<span class='i18n'>Selections for</span> " + this.name; this.message = "Fetching members..."; this.query = args.workspace.query; this.selected_members = []; this.available_members = []; this.topLevel; _.bindAll(this, "fetch_members", "populate", "finished", "get_members", "use_result_action", "show_totals_action"); // Determine axis this.axis = "undefined"; if (args.axis) { this.axis = args.axis; if (args.axis == "FILTER") { this.use_result_option = false; } } else { if (args.target.parents('.fields_list_body').hasClass('rows')) { this.axis = "ROWS"; } if (args.target.parents('.fields_list_body').hasClass('columns')) { this.axis = "COLUMNS"; } if (args.target.parents('.fields_list_body').hasClass('filter')) { this.axis = "FILTER"; this.use_result_option = false; } } // Resize when rendered this.bind('open', this.post_render); this.render(); $(this.el).parent().find('.ui-dialog-titlebar-close').bind('click',this.finished); // Fetch available members this.member = new Member({}, { cube: args.workspace.selected_cube, dimension: args.key }); // Load template $(this.el).find('.dialog_body') .html(_.template($("#template-selections").html())(this)); var hName = this.member.hierarchy; var lName = this.member.level; var hierarchy = this.workspace.query.helper.getHierarchy(hName); var level = null; if (hierarchy && hierarchy.levels.hasOwnProperty(lName)) { level = hierarchy.levels[lName]; } if ((this.source === 'DateFilterModal' && (_.has(level, 'selection') && level.selection.members.length === 0)) || (this.source === 'DateFilterModal' && (_.size(level) === 1 && _.has(level, 'name')))) { this.$el.find('.dialog_footer a:nth-child(2)').show(); } else { this.$el.find('.dialog_footer a:nth-child(2)').hide(); } if (Settings.ALLOW_PARAMETERS) { if (level) { var pName = level.selection ? 
level.selection.parameterName : null; if (pName) { $(this.el).find('#parameter').val(pName); if(this.query.helper.model().parameters[pName]!=undefined) { this.paramvalue = this.query.helper.model().parameters[pName].split(","); } } } $(this.el).find('.parameter').removeClass('hidden'); } var showTotalsEl = $(this.el).find('#show_totals'); showTotalsEl.val(''); // fixme: we should check for deepest level here if (_.size(hierarchy.levels) > 1 && level && level.hasOwnProperty('aggregators') && level.aggregators) { if (level.aggregators.length > 0) { this.show_totals_option = level.aggregators; } showTotalsEl.removeAttr("disabled"); } else { showTotalsEl.attr("disabled", true); this.show_totals_option = []; } // showTotalsEl.val(this.show_totals_option); // showTotalsEl.removeAttr("disabled"); $(this.el).find('#use_result').attr('checked', this.use_result_option); $(this.el).find('.search_limit').text(this.members_search_limit); $(this.el).find('.members_limit').text(this.members_limit); var calcMembers = this.workspace.query.helper.getCalculatedMembers(); if (calcMembers.length > 0) { this.fetch_calcmembers_levels(); } else { this.get_members(); } }, open_date_filter: function(event) { event.preventDefault(); // Launch date filter dialog (new DateFilterModal({ dimension: this.objDateFilter.dimension, hierarchy: this.objDateFilter.hierarchy, target: this.target, name: this.name, data: this.objDateFilter.data, analyzerDateFormat: this.objDateFilter.analyzerDateFormat, dimHier: this.objDateFilter.dimHier, key: this.key, workspace: this.workspace })).open(); this.$el.dialog('destroy').remove(); }, show_totals_action: function(event) { this.show_totals_option = $(event.target).val(); }, get_members: function() { var self = this; var path = "/result/metadata/hierarchies/" + encodeURIComponent(this.member.hierarchy) + "/levels/" + encodeURIComponent(this.member.level); this.search_path = path; var message = '<span class="processing_image">&nbsp;&nbsp;</span> <span class="i18n">' + self.message + '</span> '; self.workspace.block(message); /** * gett isn't a typo, although someone should probably rename that method to avoid confusion. 
*/ this.workspace.query.action.gett(path, { success: this.fetch_members, error: function() { self.workspace.unblock(); }, data: {result: this.use_result_option, searchlimit: this.members_limit }}); }, clear_search: function() { $(this.el).find('.filterbox').val(''); this.available_members = []; this.get_members(); }, search_members: function() { var self = this; var search_term = $(this.el).find('.filterbox').val(); if (!search_term) return false; var message = '<span class="processing_image">&nbsp;&nbsp;</span> <span class="i18n">Searching for members matching:</span> ' + search_term; self.workspace.block(message); self.workspace.query.action.gett(self.search_path, { async: false, success: function(response, model) { if (model && model.length > 0) { self.available_members = model; } self.populate(); }, error: function () { self.workspace.unblock(); }, data: { search: search_term, searchlimit: self.members_search_limit } }); }, fetch_calcmembers_levels: function() { var dimHier = this.member.hierarchy.split('].['); var m4=true; if(dimHier.length===1){ m4=false; dimHier = this.member.hierarchy.split('.'); } if(dimHier.length>1){ var hName = dimHier[1].replace(/[\[\]]/gi, ''); } var dName = dimHier[0].replace(/[\[\]]/gi, ''); var message = '<span class="processing_image">&nbsp;&nbsp;</span> <span class="i18n">' + this.message + '</span> '; this.workspace.block(message); if(!m4){ if(hName!=undefined) { hName = dName + "." + hName; } else{ hName = dName; } } var level = new Level({}, { ui: this, cube: this.workspace.selected_cube, dimension: dName, hierarchy: hName }); level.fetch({ success: this.get_levels }); }, get_levels: function(model, response) { if (response && response.length > 0) { model.ui.topLevel = response[0]; model.ui.get_members(); } }, get_calcmembers: function() { var self = this; var hName = this.member.hierarchy; var calcMembers = this.workspace.query.helper.getCalculatedMembers(); var arrCalcMembers = []; if (this.topLevel.name === this.member.level) { _.filter(calcMembers, function(value) { if (value.hierarchyName === hName && _.isEmpty(value.parentMember)) { value.uniqueName = value.hierarchyName + '.[' + value.name + ']'; arrCalcMembers.push(value); } }); } else { _.filter(calcMembers, function(value) { if (value.hierarchyName === hName && value.parentMemberLevel === self.member.level) { value.uniqueName = value.parentMember + '.[' + value.name + ']'; arrCalcMembers.push(value); } }); } return arrCalcMembers; }, fetch_members: function(model, response) { var self = this; if (response && response.length > 0) { _.each(response, function(f){ var cmem = self.workspace.query.helper.getCalculatedMembers(); var calc = false; if(cmem && cmem.length>0){ _.each(cmem, function(c){ if(c.uniqueName === f.uniqueName){ calc = true; } }) } self.available_members.push({obj:f, calc:calc}); }); } this.populate(); }, populate: function(model, response) { var self = this; self.workspace.unblock(); this.members_search_server = (this.available_members.length >= this.members_limit || this.available_members.length == 0); self.show_unique_option = false; $(this.el).find('.options #show_unique').attr('checked',false); var calcMembers = this.workspace.query.helper.getCalculatedMembers(); if (calcMembers.length > 0) { var newCalcMembers = this.get_calcmembers(); var len = newCalcMembers.length; for (var i = 0; i < len; i++) { var calc = false; if(calcMembers && calcMembers.length>0){ _.each(calcMembers, function(c){ if(c.uniqueName === newCalcMembers[i].uniqueName){ calc = true; } }) } 
this.available_members.push({obj:newCalcMembers[i], calc:calc}); } } $(this.el).find('.items_size').text(this.available_members.length); if (this.members_search_server) { $(this.el).find('.warning').text("More items available than listed. Pre-Filter on server."); } else { $(this.el).find('.warning').text(""); } var hName = self.member.hierarchy; var lName = self.member.level; var hierarchy = self.workspace.query.helper.getHierarchy(hName); if (hierarchy && hierarchy.levels.hasOwnProperty(lName)) { this.selected_members = []; if(hierarchy.levels[lName].selection){ _.each(hierarchy.levels[lName].selection.members, function(f){ var cmem = self.workspace.query.helper.getCalculatedMembers(); var calc = false; if(cmem && cmem.length > 0){ _.each(cmem, function(c){ if(c.uniqueName === f.uniqueName){ calc = true; } }); } self.selected_members.push({obj: f, calc: calc}); }); } this.selection_type = hierarchy.levels[lName].selection ? hierarchy.levels[lName].selection.type : "INCLUSION"; } var used_members = []; // Populate both boxes for (var j = 0, len = this.selected_members.length; j < len; j++) { var member = this.selected_members[j]; used_members.push(member.obj.caption); } if ($(this.el).find('.used_selections .selection_options li.option_value' ).length == 0) { var selectedMembers = $(this.el).find('.used_selections .selection_options'); selectedMembers.empty(); var selectedHtml = _.template($("#template-selections-options").html())({ options: this.selected_members }); $(selectedMembers).html(selectedHtml); } // Add the selections totals var measuresArray = self.workspace.query.model.queryModel.details.measures; for (var j = 0; j < measuresArray.length; j++) { $(this.el).find('#div-totals-container').append(_.template($("#template-selections-totals").html())({measure: measuresArray[j]})); } $('#per_metrics_totals_checkbox').change(function() { if($(this).is(":checked")) { $('.per_metrics_container').show(); $('.all_metrics_container').hide(); } else { $('.per_metrics_container').hide(); $('.all_metrics_container').show(); } }); // Filter out used members this.available_members = _.select(this.available_members, function(o) { return used_members.indexOf(o.obj ? o.obj.caption : o.caption) === -1; }); if (this.available_members.length > 0) { var availableMembersSelect = $(this.el).find('.available_selections .selection_options'); availableMembersSelect.empty(); var selectedHtml = _.template($("#template-selections-options").html())({ options: this.available_members }); $(availableMembersSelect).html(selectedHtml); } if ($(self.el).find( ".selection_options.ui-selectable" ).length > 0) { $(self.el).find( ".selection_options" ).selectable( "destroy" ); } $(self.el).find( ".selection_options" ).selectable({ distance: 20, filter: "li", stop: function( event, ui ) { $(self.el).find( ".selection_options li.ui-selected input").each(function(index, element) { if (element && element.hasAttribute('checked')) { element.checked = true; } else { $(element).attr('checked', true); } $(element).parents('.selection_options').find('li.all_options input').prop('checked', true); }); $(self.el).find( ".selection_options li.ui-selected").removeClass('ui-selected'); }}); $(this.el).find('.filterbox').autocomplete({ minLength: 1, //(self.members_search_server ? 2 : 1), delay: 200, //(self.members_search_server ? 400 : 300), appendTo: ".autocomplete", source: function(request, response ) { var searchlist = self.available_members; var search_target = self.show_unique_option == false ? 
"caption" : "name"; var result = $.map( searchlist, function( item ) { var st = item.obj; var obj; if(st === undefined){ st = item; obj = st[search_target]; } else{ obj = st.caption; } if (obj.toLowerCase().indexOf(request.term.toLowerCase()) > -1) { var label = self.show_unique_option == false? st.caption : st.uniqueName; var value = self.show_unique_option == false? st.uniqueName : st.caption; return { label: label, value: value }; } }); response(result); }, select: function(event, ui) { var value = encodeURIComponent(ui.item.value); var label = ui.item.label; var searchVal = self.show_unique_option == false? ui.item.value : ui.item.label; var cap = self.show_unique_option == false? ui.item.label : ui.item.value; $(self.el).find('.available_selections .selection_options input[value="' + encodeURIComponent(searchVal) + '"]').parent().remove(); $(self.el).find('.used_selections .selection_options input[value="' + encodeURIComponent(searchVal) + '"]').parent().remove(); var option = '<li class="option_value"><input type="checkbox" class="check_option" value="' + encodeURIComponent(searchVal) + '" label="' + encodeURIComponent(cap) + '">' + label + '</input></li>'; $(option).appendTo($(self.el).find('.used_selections .selection_options ul')); $(self.el).find('.filterbox').val(''); ui.item.value = ""; }, close: function(event, ui) { //$('#filter_selections').val(''); //$(self.el).find('.filterbox').css({ "text-align" : " left"}); }, open: function( event, ui ) { //$(self.el).find('.filterbox').css({ "text-align" : " right"}); } }); $(this.el).find('.filterbox').autocomplete("enable"); if (this.selection_type === "EXCLUSION") { $(this.el).find('.selection_type_inclusion').prop('checked', false); $(this.el).find('.selection_type_exclusion').prop('checked', true); } else { $(this.el).find('.selection_type_inclusion').prop('checked', true); $(this.el).find('.selection_type_exclusion').prop('checked', false); } // Translate Saiku.i18n.translate(); // Show dialog Saiku.ui.unblock(); }, post_render: function(args) { var left = ($(window).width() - 1000)/2; var width = $(window).width() < 1040 ? $(window).width() : 1040; $(args.modal.el).parents('.ui-dialog') .css({ width: width, left: "inherit", margin:"0", height: 630 }) .offset({ left: left}); $('#filter_selections').attr("disabled", false); $(this.el).find('a[href=#save]').focus(); $(this.el).find('a[href=#save]').blur(); }, move_selection: function(event) { event.preventDefault(); var action = $(event.target).attr('id'); var $to = action.indexOf('add') !== -1 ? $(this.el).find('.used_selections .selection_options ul') : $(this.el).find('.available_selections .selection_options ul'); var $from = action.indexOf('add') !== -1 ? $(this.el).find('.available_selections .selection_options ul') : $(this.el).find('.used_selections .selection_options ul'); var $els = action.indexOf('all') !== -1 ? 
$from.find('li.option_value input').parent() : $from.find('li.option_value input:checked').parent(); $els.detach().appendTo($to); $(this.el).find('.selection_options ul li.option_value input:checked').prop('checked', false); $(this.el).find('.selection_options li.all_options input').prop('checked', false); }, updown_selection: function(event) { event.preventDefault(); return false; /* var action = $(event.target).attr('href').replace('#',''); if (typeof action != "undefined") { if ("up" == action) { $(this.el).find('.used_selections option:selected').insertBefore( $('.used_selections option:selected:first').prev()); } else if ("down" == action) { $(this.el).find('.used_selections option:selected').insertAfter( $('.used_selections option:selected:last').next()); } } */ }, click_all_member_selection: function(event, ui) { var checked = $(event.currentTarget).find('input').is(':checked'); if (!checked) { $(event.currentTarget).parent().find('li.option_value input').removeAttr('checked'); } else { $(event.currentTarget).parent().find('li.option_value input').prop('checked', true); } }, click_move_selection: function(event, ui) { event.preventDefault(); var to = ($(event.target).parent().parent().parent().parent().hasClass('used_selections')) ? '.available_selections' : '.used_selections'; $(event.target).parent().appendTo($(this.el).find(to +' .selection_options ul')); }, show_unique_action: function() { var self = this; this.show_unique_option= ! this.show_unique_option; if(this.show_unique_option === true) { $(this.el).find('.available_selections, .used_selections').addClass('unique'); $(this.el).find('.available_selections, .used_selections').removeClass('caption'); } else { $(this.el).find('.available_selections, .used_selections').addClass('caption'); $(this.el).find('.available_selections, .used_selections').removeClass('unique'); } }, use_result_action: function() { this.use_result_option = !this.use_result_option; this.get_members(); }, save: function() { this.query.updatedSelectionFromModal = true; var self = this; // Notify user that updates are in progress var $loading = $("<div>Saving...</div>"); $(this.el).find('.dialog_body').children().hide(); $(this.el).find('.dialog_body').prepend($loading); var show_u = this.show_unique_option; var hName = decodeURIComponent(self.member.hierarchy); var lName = decodeURIComponent(self.member.level) var hierarchy = self.workspace.query.helper.getHierarchy(hName); // Determine updates var updates = []; var totalsFunction = this.show_totals_option; // If no selections are used, add level if ($(this.el).find('.used_selections input').length === 0) { // nothing to do - include all members of this level } else { self.workspace.query.helper.removeAllLevelCalculatedMember(hName); // Loop through selections $(this.el).find('.used_selections .option_value input') .each(function(i, selection) { var value = $(selection).val(); if($(selection).hasClass("cmember")){ var caption = $(selection).attr('label'); self.workspace.toolbar.group_parents(); self.workspace.query.helper.includeLevelCalculatedMember(self.axis, hName, lName, decodeURIComponent(value), 0); updates.push({ uniqueName: decodeURIComponent(value), caption: decodeURIComponent(caption), type: "calculatedmember" }); } else { var caption = $(selection).attr('label'); updates.push({ uniqueName: decodeURIComponent(value), caption: decodeURIComponent(caption) }); } }); } var parameterName = $('#parameter').val(); if (hierarchy && hierarchy.levels.hasOwnProperty(lName)) { var totalsArray = []; 
if($('#per_metrics_totals_checkbox').is(":checked")) { $('.show_totals_select').each(function() { totalsArray.push($(this).val()); }); } else { var measuresArray = self.workspace.query.model.queryModel.details.measures; for (var j = 0; j < measuresArray.length; j++) { totalsArray.push($('#all_measures_select').val()); } } hierarchy.levels[lName]["aggregators"] = totalsArray; var selectionType = $(self.el).find('input.selection_type:checked').val(); selectionType = selectionType ? selectionType : "INCLUSION"; hierarchy.levels[lName].selection = { "type": selectionType, "members": updates }; if (Settings.ALLOW_PARAMETERS && parameterName) { hierarchy.levels[lName].selection["parameterName"] = parameterName; var parameters = self.workspace.query.helper.model().parameters; if (!parameters[parameterName]) { // self.workspace.query.helper.model().parameters[parameterName] = ""; } } } this.finished(); }, finished: function() { $('#filter_selections').remove(); this.available_members = null; $(this.el).find('.filterbox').autocomplete('destroy').remove(); $(this.el).dialog('destroy'); $(this.el).remove(); this.query.run(); } });<|fim▁end|>
* limitations under the License. */
<|file_name|>Divider.js<|end_file_name|><|fim▁begin|>/* @flow */
import * as React from 'react';
import cn from 'classnames';
import { StyleClasses } from '../theme/styleClasses';

type Props = {
  // An optional inline-style to apply to the overlay.
  style: ?Object,
  // An optional css className to apply.
  className: ?string,
  // Boolean if this divider should be inset relative to it's container
  inset: ?boolean,
  // Boolean if the divider should be vertical instead of horizontal.
  vertical: ?boolean,
};

const BASE_ELEMENT = StyleClasses.DIVIDER;

/**
 * The divider component will pass all other props such as style or
 * event listeners on to the component.
 */
class Divider extends React.PureComponent<Props, *> {
  props: Props;

  render() {
    const { className, inset, vertical, ...props } = this.props;
    const Component = vertical ? 'div' : 'hr';
    return (
      <Component
        {...props}
        className={cn(
          BASE_ELEMENT,<|fim▁hole|>
          'boldrui-divider__vertical': vertical,
          'boldrui-divider__inset': inset,
          },
          className,
        )}
      />
    );
  }
}

export default Divider;<|fim▁end|>
{
<|file_name|>defaults.py<|end_file_name|><|fim▁begin|>""" Django settings for the admin project. """ import os from urlparse import urlparse from website import settings as osf_settings from django.contrib import messages from api.base.settings import * # noqa # TODO ALL SETTINGS FROM API WILL BE IMPORTED AND WILL NEED TO BE OVERRRIDEN # TODO THIS IS A STEP TOWARD INTEGRATING ADMIN & API INTO ONE PROJECT # import local # Build own local.py (used with postgres)<|fim▁hole|># Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ # from the OSF settings # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = osf_settings.SECRET_KEY # Don't allow migrations DATABASE_ROUTERS = ['admin.base.db.router.NoMigrationRouter'] # SECURITY WARNING: don't run with debug turned on in production! DEBUG = osf_settings.DEBUG_MODE DEBUG_PROPAGATE_EXCEPTIONS = True # session: SESSION_COOKIE_NAME = 'admin' SESSION_COOKIE_SECURE = osf_settings.SECURE_MODE SESSION_COOKIE_HTTPONLY = osf_settings.SESSION_COOKIE_HTTPONLY # csrf: CSRF_COOKIE_NAME = 'admin-csrf' CSRF_COOKIE_SECURE = osf_settings.SECURE_MODE # set to False: prereg uses a SPA and ajax and grab the token to use it in the requests CSRF_COOKIE_HTTPONLY = False ALLOWED_HOSTS = [ '.osf.io' ] AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': { 'min_length': 5, } }, ] USE_L10N = False # Email settings. Account created for testing. Password shouldn't be hardcoded # [DEVOPS] this should be set to 'django.core.mail.backends.smtp.EmailBackend' in the > dev local.py. EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # Sendgrid Email Settings - Using OSF credentials. # Add settings references to local.py EMAIL_HOST = osf_settings.MAIL_SERVER EMAIL_HOST_USER = osf_settings.MAIL_USERNAME EMAIL_HOST_PASSWORD = osf_settings.MAIL_PASSWORD EMAIL_PORT = 587 EMAIL_USE_TLS = True # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.messages', 'django.contrib.sessions', 'django.contrib.staticfiles', # 3rd party 'raven.contrib.django.raven_compat', 'webpack_loader', 'django_nose', 'password_reset', # OSF 'osf', # Addons 'addons.osfstorage', 'addons.wiki', 'addons.twofactor', # Internal apps 'admin.common_auth', 'admin.base', 'admin.pre_reg', 'admin.spam', 'admin.metrics', 'admin.nodes', 'admin.users', 'admin.desk', 'admin.meetings', ) MIGRATION_MODULES = { 'osf': None, 'addons_osfstorage': None, 'addons_wiki': None, 'addons_twofactor': None, } USE_TZ = True # local development using https if osf_settings.SECURE_MODE and osf_settings.DEBUG_MODE: INSTALLED_APPS += ('sslserver',) # Custom user model (extends AbstractBaseUser) AUTH_USER_MODEL = 'osf.OSFUser' # TODO: Are there more granular ways to configure reporting specifically related to the API? 
RAVEN_CONFIG = { 'tags': {'App': 'admin'}, 'dsn': osf_settings.SENTRY_DSN, 'release': osf_settings.VERSION, } # Settings related to CORS Headers addon: allow API to receive authenticated requests from OSF # CORS plugin only matches based on "netloc" part of URL, so as workaround we add that to the list CORS_ORIGIN_ALLOW_ALL = False CORS_ORIGIN_WHITELIST = (urlparse(osf_settings.DOMAIN).netloc, osf_settings.DOMAIN, ) CORS_ALLOW_CREDENTIALS = True MIDDLEWARE_CLASSES = ( # TokuMX transaction support # Needs to go before CommonMiddleware, so that transactions are always started, # even in the event of a redirect. CommonMiddleware may cause other middlewares' # process_request to be skipped, e.g. when a trailing slash is omitted 'api.base.middleware.DjangoGlobalMiddleware', 'api.base.middleware.CeleryTaskMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) MESSAGE_TAGS = { messages.SUCCESS: 'text-success', messages.ERROR: 'text-danger', messages.WARNING: 'text-warning', } TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], } }] ROOT_URLCONF = 'admin.base.urls' WSGI_APPLICATION = 'admin.base.wsgi.application' ADMIN_BASE = '' STATIC_URL = '/static/' LOGIN_URL = 'account/login/' LOGIN_REDIRECT_URL = ADMIN_BASE STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'static_root') STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), ) STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), ) LANGUAGE_CODE = 'en-us' WEBPACK_LOADER = { 'DEFAULT': { 'BUNDLE_DIR_NAME': 'public/js/', 'STATS_FILE': os.path.join(BASE_DIR, 'webpack-stats.json'), } } TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' NOSE_ARGS = ['--verbosity=2'] # Keen.io settings in local.py KEEN_PROJECT_ID = osf_settings.KEEN['private']['project_id'] KEEN_READ_KEY = osf_settings.KEEN['private']['read_key'] KEEN_WRITE_KEY = osf_settings.KEEN['private']['write_key'] KEEN_CREDENTIALS = { 'keen_ready': False } if KEEN_CREDENTIALS['keen_ready']: KEEN_CREDENTIALS.update({ 'keen_project_id': KEEN_PROJECT_ID, 'keen_read_key': KEEN_READ_KEY, 'keen_write_key': KEEN_WRITE_KEY }) ENTRY_POINTS = {'osf4m': 'osf4m', 'prereg_challenge_campaign': 'prereg', 'institution_campaign': 'institution'} # Set in local.py DESK_KEY = '' DESK_KEY_SECRET = ''<|fim▁end|>
# TODO - remove duplicated items, as this is now using settings from the API
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
<|file_name|>MathMaxMinEveryElementToNumber.java<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

package org.mozilla.javascript.tests;

import org.mozilla.javascript.Context;
import org.mozilla.javascript.drivers.LanguageVersion;
import org.mozilla.javascript.drivers.RhinoTest;
import org.mozilla.javascript.drivers.ScriptTestsBase;

@RhinoTest("testsrc/jstests/math-max-min-every-element-to-number.js")
@LanguageVersion(Context.VERSION_DEFAULT)
public class MathMaxMinEveryElementToNumber extends ScriptTestsBase<|fim▁hole|><|fim▁end|>
{ }
<|file_name|>ClusterView.java<|end_file_name|><|fim▁begin|>package org.cacheonix.impl.net.cluster; import java.util.List; import java.util.Set; import org.cacheonix.impl.net.ClusterNodeAddress; import org.cacheonix.impl.net.processor.ReceiverAddress; import org.cacheonix.impl.net.processor.UUID; import org.cacheonix.impl.net.serializer.Wireable; /** * Marker list. * <p/> * * @author <a href="mailto:[email protected]">Slava Imeshev</a> */ public interface ClusterView extends Wireable { void setOwner(ClusterNodeAddress owner); boolean isRepresentative(); ClusterNodeAddress getNextElement(); int getSize(); boolean remove(ClusterNodeAddress clusterNodeAddress); void insert(ClusterNodeAddress predecessor, final ClusterNodeAddress address); /** * @return representative. */ ClusterNodeAddress getRepresentative(); /** * @param elementAfter existing element after that to return a element * @throws IllegalStateException if the element is not in the list */ ClusterNodeAddress getNextElement(ClusterNodeAddress elementAfter) throws IllegalStateException; /** * Returns <code>true</code> if this marker list has a majority over te other list that we have common a ancestor * with. In other words, we and the another list a parts of some previous list. * * @param previousView * @return <code>true</code> if this marker list has a majority over other list. */ boolean hasMajorityOver(ClusterView previousView); /** * Returns a copy of the process list. * * @return a copy of the process list. */ List<ClusterNodeAddress> getClusterNodeList(); /** * {@inheritDoc} */ ClusterView copy(); /** * Returns this cluster's unique ID. * * @return this cluster's unique ID. */ UUID getClusterUUID(); /** * Returns <code>true</code> if the cluster view contains active node. *<|fim▁hole|> */ boolean contains(ClusterNodeAddress address); /** * Returns <code>true</code> if the cluster view contains active node. * * @param address ClusterNodeAddress to check. * @return <code>true</code> if the cluster view contains active node. */ boolean contains(ReceiverAddress address); /** * Calculates a collection of members that have left as compared to the previous view. * * @param previousClusterView previous cluster view * @return a collection of members that have left as compared to this previous view. */ Set<ClusterNodeAddress> calculateNodesLeft(ClusterView previousClusterView); /** * Calculates a collection of members that have joined as compared to the previous view. * * @param previousClusterView previous cluster view * @return a collection of members that have joined as compared to this previous view. */ Set<ClusterNodeAddress> calculateNodesJoined(ClusterView previousClusterView); ClusterNodeAddress getNextElement(ReceiverAddress elementAfter); ClusterNodeAddress greatestMember(); }<|fim▁end|>
* @param address ClusterNodeAddress to check. * @return <code>true</code> if the cluster view contains active node.
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># $Id$ # importing this module shouldn't directly cause other large imports # do large imports in the init() hook so that you can call back to the # ModuleManager progress handler methods. """vtk_kit package driver file. This performs all initialisation necessary to use VTK from DeVIDE. Makes sure that all VTK classes have ErrorEvent handlers that report back to the ModuleManager. Inserts the following modules in sys.modules: vtk, vtkdevide. @author: Charl P. Botha <http://cpbotha.net/> """ import re import sys import traceback import types VERSION = '' def preImportVTK(progressMethod): vtkImportList = [('vtk.common', 'VTK Common.'), ('vtk.filtering', 'VTK Filtering.'), ('vtk.io', 'VTK IO.'), ('vtk.imaging', 'VTK Imaging.'), ('vtk.graphics', 'VTK Graphics.'), ('vtk.rendering', 'VTK Rendering.'), ('vtk.hybrid', 'VTK Hybrid.'), #('vtk.patented', 'VTK Patented.'), ('vtk', 'Other VTK symbols')] # set the dynamic loading flags. If we don't do this, we get strange # errors on 64 bit machines. To see this happen, comment this statement # and then run the VTK->ITK connection test case. oldflags = setDLFlags() percentStep = 100.0 / len(vtkImportList) currentPercent = 0.0 # do the imports for module, message in vtkImportList: currentPercent += percentStep progressMethod(currentPercent, 'Initialising vtk_kit: %s' % (message,), noTime=True) exec('import %s' % (module,)) # restore previous dynamic loading flags resetDLFlags(oldflags) def setDLFlags(): # brought over from ITK Wrapping/CSwig/Python # Python "help(sys.setdlopenflags)" states: # # setdlopenflags(...) # setdlopenflags(n) -> None # # Set the flags that will be used for dlopen() calls. Among other # things, this will enable a lazy resolving of symbols when # importing a module, if called as sys.setdlopenflags(0) To share # symbols across extension modules, call as # # sys.setdlopenflags(dl.RTLD_NOW|dl.RTLD_GLOBAL) # # GCC 3.x depends on proper merging of symbols for RTTI: # http://gcc.gnu.org/faq.html#dso # try: import dl newflags = dl.RTLD_NOW|dl.RTLD_GLOBAL except: newflags = 0x102 # No dl module, so guess (see above). try: oldflags = sys.getdlopenflags() sys.setdlopenflags(newflags) except: oldflags = None return oldflags def resetDLFlags(data): # brought over from ITK Wrapping/CSwig/Python # Restore the original dlopen flags. try: sys.setdlopenflags(data) except: pass def init(module_manager, pre_import=True): # first do the VTK pre-imports: this is here ONLY to keep the user happy # it's not necessary for normal functioning if pre_import: preImportVTK(module_manager.setProgress) # import the main module itself # the global is so that users can also do: # from module_kits import vtk_kit # vtk_kit.vtk.vtkSomeFilter() global vtk import vtk <|fim▁hole|> # load up some generic functions into this namespace # user can, after import of module_kits.vtk_kit, address these as # module_kits.vtk_kit.blaat. In this case we don't need "global", # as these are modules directly in this package. import module_kits.vtk_kit.misc as misc import module_kits.vtk_kit.mixins as mixins import module_kits.vtk_kit.utils as utils import module_kits.vtk_kit.constants as constants import module_kits.vtk_kit.color_scales as color_scales # setup the kit version global VERSION VERSION = '%s' % (vtk.vtkVersion.GetVTKVersion(),)<|fim▁end|>
# and do the same for vtkdevide global vtkdevide import vtkdevide
<|file_name|>test-server-errors.js<|end_file_name|><|fim▁begin|>'use strict'; var http = require('http'); var Logger = require('bunyan');<|fim▁hole|>}); var server = http.createServer(function (request) { var data = ''; log.info({ url: request.url }, 'Incoming Request'); request.on('data', function (chunk) { data += chunk; }); throw new Error('expected error'); }); var port = 3000; server.listen(port); log.info({ port: port }, 'listening');<|fim▁end|>
var log = new Logger({ name: 'test-server', level: 'debug'
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from simplequeue.lib.configuration import config<|fim▁hole|> __all__ = ['config']<|fim▁end|>
<|file_name|>htmloptionelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::dom::attr::Attr; use crate::dom::bindings::codegen::Bindings::CharacterDataBinding::CharacterDataMethods; use crate::dom::bindings::codegen::Bindings::HTMLOptionElementBinding; use crate::dom::bindings::codegen::Bindings::HTMLOptionElementBinding::HTMLOptionElementMethods; use crate::dom::bindings::codegen::Bindings::HTMLSelectElementBinding::HTMLSelectElementBinding::HTMLSelectElementMethods; use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeMethods; use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowMethods; use crate::dom::bindings::error::Fallible; use crate::dom::bindings::inheritance::Castable; use crate::dom::bindings::root::DomRoot; use crate::dom::bindings::str::DOMString; use crate::dom::characterdata::CharacterData; use crate::dom::document::Document; use crate::dom::element::{AttributeMutation, CustomElementCreationMode, Element, ElementCreator}; use crate::dom::htmlelement::HTMLElement; use crate::dom::htmlformelement::HTMLFormElement; use crate::dom::htmloptgroupelement::HTMLOptGroupElement; use crate::dom::htmlscriptelement::HTMLScriptElement; use crate::dom::htmlselectelement::HTMLSelectElement;<|fim▁hole|>use crate::dom::node::{BindContext, Node, ShadowIncluding, UnbindContext}; use crate::dom::text::Text; use crate::dom::virtualmethods::VirtualMethods; use crate::dom::window::Window; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix, QualName}; use std::cell::Cell; use std::convert::TryInto; use style::element_state::ElementState; use style::str::{split_html_space_chars, str_join}; #[dom_struct] pub struct HTMLOptionElement { htmlelement: HTMLElement, /// <https://html.spec.whatwg.org/multipage/#attr-option-selected> selectedness: Cell<bool>, /// <https://html.spec.whatwg.org/multipage/#concept-option-dirtiness> dirtiness: Cell<bool>, } impl HTMLOptionElement { fn new_inherited( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> HTMLOptionElement { HTMLOptionElement { htmlelement: HTMLElement::new_inherited_with_state( ElementState::IN_ENABLED_STATE, local_name, prefix, document, ), selectedness: Cell::new(false), dirtiness: Cell::new(false), } } #[allow(unrooted_must_root)] pub fn new( local_name: LocalName, prefix: Option<Prefix>, document: &Document, ) -> DomRoot<HTMLOptionElement> { Node::reflect_node( Box::new(HTMLOptionElement::new_inherited( local_name, prefix, document, )), document, HTMLOptionElementBinding::Wrap, ) } // https://html.spec.whatwg.org/multipage/#dom-option #[allow(non_snake_case)] pub fn Option( window: &Window, text: DOMString, value: Option<DOMString>, default_selected: bool, selected: bool, ) -> Fallible<DomRoot<HTMLOptionElement>> { let element = Element::create( QualName::new(None, ns!(html), local_name!("option")), None, &window.Document(), ElementCreator::ScriptCreated, CustomElementCreationMode::Synchronous, ); let option = DomRoot::downcast::<HTMLOptionElement>(element).unwrap(); if !text.is_empty() { option.upcast::<Node>().SetTextContent(Some(text)) } if let Some(val) = value { option.SetValue(val) } option.SetDefaultSelected(default_selected); option.set_selectedness(selected); Ok(option) } pub fn set_selectedness(&self, selected: bool) { self.selectedness.set(selected); } pub fn set_dirtiness(&self, 
dirtiness: bool) { self.dirtiness.set(dirtiness); } fn pick_if_selected_and_reset(&self) { if let Some(select) = self .upcast::<Node>() .ancestors() .filter_map(DomRoot::downcast::<HTMLSelectElement>) .next() { if self.Selected() { select.pick_option(self); } select.ask_for_reset(); } } // https://html.spec.whatwg.org/multipage/#concept-option-index fn index(&self) -> i32 { if let Some(parent) = self.upcast::<Node>().GetParentNode() { if let Some(select_parent) = parent.downcast::<HTMLSelectElement>() { // return index in parent select's list of options return self.index_in_select(select_parent); } else if parent.is::<HTMLOptGroupElement>() { if let Some(grandparent) = parent.GetParentNode() { if let Some(select_grandparent) = grandparent.downcast::<HTMLSelectElement>() { // return index in grandparent select's list of options return self.index_in_select(select_grandparent); } } } } // "If the option element is not in a list of options, // then the option element's index is zero." // self is neither a child of a select, nor a grandchild of a select // via an optgroup, so it is not in a list of options 0 } fn index_in_select(&self, select: &HTMLSelectElement) -> i32 { match select.list_of_options().position(|n| &*n == self) { Some(index) => index.try_into().unwrap_or(0), None => { // shouldn't happen but not worth a browser panic warn!( "HTMLOptionElement called index_in_select at a select that did not contain it" ); 0 }, } } } // FIXME(ajeffrey): Provide a way of buffering DOMStrings other than using Strings fn collect_text(element: &Element, value: &mut String) { let svg_script = *element.namespace() == ns!(svg) && element.local_name() == &local_name!("script"); let html_script = element.is::<HTMLScriptElement>(); if svg_script || html_script { return; } for child in element.upcast::<Node>().children() { if child.is::<Text>() { let characterdata = child.downcast::<CharacterData>().unwrap(); value.push_str(&characterdata.Data()); } else if let Some(element_child) = child.downcast() { collect_text(element_child, value); } } } impl HTMLOptionElementMethods for HTMLOptionElement { // https://html.spec.whatwg.org/multipage/#dom-option-disabled make_bool_getter!(Disabled, "disabled"); // https://html.spec.whatwg.org/multipage/#dom-option-disabled make_bool_setter!(SetDisabled, "disabled"); // https://html.spec.whatwg.org/multipage/#dom-option-text fn Text(&self) -> DOMString { let mut content = String::new(); collect_text(self.upcast(), &mut content); DOMString::from(str_join(split_html_space_chars(&content), " ")) } // https://html.spec.whatwg.org/multipage/#dom-option-text fn SetText(&self, value: DOMString) { self.upcast::<Node>().SetTextContent(Some(value)) } // https://html.spec.whatwg.org/multipage/#dom-option-form fn GetForm(&self) -> Option<DomRoot<HTMLFormElement>> { let parent = self.upcast::<Node>().GetParentNode().and_then(|p| { if p.is::<HTMLOptGroupElement>() { p.upcast::<Node>().GetParentNode() } else { Some(p) } }); parent.and_then(|p| p.downcast::<HTMLSelectElement>().and_then(|s| s.GetForm())) } // https://html.spec.whatwg.org/multipage/#attr-option-value fn Value(&self) -> DOMString { let element = self.upcast::<Element>(); let attr = &local_name!("value"); if element.has_attribute(attr) { element.get_string_attribute(attr) } else { self.Text() } } // https://html.spec.whatwg.org/multipage/#attr-option-value make_setter!(SetValue, "value"); // https://html.spec.whatwg.org/multipage/#attr-option-label fn Label(&self) -> DOMString { let element = self.upcast::<Element>(); let 
attr = &local_name!("label"); if element.has_attribute(attr) { element.get_string_attribute(attr) } else { self.Text() } } // https://html.spec.whatwg.org/multipage/#attr-option-label make_setter!(SetLabel, "label"); // https://html.spec.whatwg.org/multipage/#dom-option-defaultselected make_bool_getter!(DefaultSelected, "selected"); // https://html.spec.whatwg.org/multipage/#dom-option-defaultselected make_bool_setter!(SetDefaultSelected, "selected"); // https://html.spec.whatwg.org/multipage/#dom-option-selected fn Selected(&self) -> bool { self.selectedness.get() } // https://html.spec.whatwg.org/multipage/#dom-option-selected fn SetSelected(&self, selected: bool) { self.dirtiness.set(true); self.selectedness.set(selected); self.pick_if_selected_and_reset(); } // https://html.spec.whatwg.org/multipage/#dom-option-index fn Index(&self) -> i32 { self.index() } } impl VirtualMethods for HTMLOptionElement { fn super_type(&self) -> Option<&dyn VirtualMethods> { Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods) } fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) { self.super_type().unwrap().attribute_mutated(attr, mutation); match attr.local_name() { &local_name!("disabled") => { let el = self.upcast::<Element>(); match mutation { AttributeMutation::Set(_) => { el.set_disabled_state(true); el.set_enabled_state(false); }, AttributeMutation::Removed => { el.set_disabled_state(false); el.set_enabled_state(true); el.check_parent_disabled_state_for_option(); }, } }, &local_name!("selected") => { match mutation { AttributeMutation::Set(_) => { // https://html.spec.whatwg.org/multipage/#concept-option-selectedness if !self.dirtiness.get() { self.selectedness.set(true); } }, AttributeMutation::Removed => { // https://html.spec.whatwg.org/multipage/#concept-option-selectedness if !self.dirtiness.get() { self.selectedness.set(false); } }, } }, _ => {}, } } fn bind_to_tree(&self, context: &BindContext) { if let Some(ref s) = self.super_type() { s.bind_to_tree(context); } self.upcast::<Element>() .check_parent_disabled_state_for_option(); self.pick_if_selected_and_reset(); } fn unbind_from_tree(&self, context: &UnbindContext) { self.super_type().unwrap().unbind_from_tree(context); if let Some(select) = context .parent .inclusive_ancestors(ShadowIncluding::No) .filter_map(DomRoot::downcast::<HTMLSelectElement>) .next() { select.ask_for_reset(); } let node = self.upcast::<Node>(); let el = self.upcast::<Element>(); if node.GetParentNode().is_some() { el.check_parent_disabled_state_for_option(); } else { el.check_disabled_attribute(); } } }<|fim▁end|>
<|file_name|>zip.py<|end_file_name|><|fim▁begin|>import os, tempfile, zipfile, tarfile, logging from django.core.servers.basehttp import FileWrapper from django.http import HttpResponse def get_zipfile(file_list): """ Create a ZIP file on disk and transmit it in chunks of 8KB, without loading the whole file into memory. """ temp = tempfile.TemporaryFile() archive = zipfile.ZipFile(temp, 'w', zipfile.ZIP_DEFLATED) for file in file_list: file = file.encode("utf-8") if os.path.exists(file): archive.write(file, os.path.basename(file)) else: logging.warn("zipfile could not find %s" % file) archive.close() wrapper = FileWrapper(temp) response = HttpResponse(wrapper, content_type='application/zip') response['Content-Disposition'] = 'attachment; filename=commcarehq.zip' response['Content-Length'] = temp.tell() # this seek is required for 'response' to work temp.seek(0) return response def build_tarfile(file_list, output_file): """ Creates a tarfile on disk, given a list of input files """ tar = tarfile.open(name=output_file, mode="w:bz2") if len (file_list) == 0: logging.info("No submissions could be found.") return HttpResponse("No submissions could be found.") for file in file_list: tar.add(file, os.path.basename(file) ) tar.close() return tar def get_tarfile(file_list, output_file): """ Creates a tarfile on disk, given a list of input files, and returns it as an http response. """ tar = build_tarfile(file_list, output_file) fin = open(output_file, 'rb')<|fim▁hole|> response['Content-Length'] = os.path.getsize(output_file) # this seek is required for 'response' to work return response class Compressor(object): """ Interface to create a compressed file on disk, given streams """ def open(self, output_file): raise NotImplementedError() def add_stream(self, stream, size=0, name=None ): raise NotImplementedError() def close(self): raise NotImplementedError() class TarCompressor(Compressor): """ Interface to create a tarfile on disk, given various input streams """ def __init__(self): self._tar = None def open(self, name=None, fileobj=None): if name == None and fileobj == None: raise ValueError('Either name or fileobj must be supplied to TarCompressor') self._tar = tarfile.open(name=name, fileobj=fileobj, mode="w:bz2") def add_stream(self, stream, size=0, name=None): tar_info = tarfile.TarInfo( name=name ) tar_info.size = size self._tar.addfile(tar_info, fileobj=stream) def add_file(self, file): self._tar.add(file, os.path.basename(file)) def close(self): self._tar.close()<|fim▁end|>
wrapper = FileWrapper(fin) response = HttpResponse(wrapper, content_type='application/tar') response['Content-Disposition'] = 'attachment; filename=commcarehq.tar'
<|file_name|>v1_rule_with_operations.py<|end_file_name|><|fim▁begin|># coding: utf-8 """ Kubernetes No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 The version of the OpenAPI document: release-1.23 Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 <|fim▁hole|> class V1RuleWithOperations(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'api_groups': 'list[str]', 'api_versions': 'list[str]', 'operations': 'list[str]', 'resources': 'list[str]', 'scope': 'str' } attribute_map = { 'api_groups': 'apiGroups', 'api_versions': 'apiVersions', 'operations': 'operations', 'resources': 'resources', 'scope': 'scope' } def __init__(self, api_groups=None, api_versions=None, operations=None, resources=None, scope=None, local_vars_configuration=None): # noqa: E501 """V1RuleWithOperations - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._api_groups = None self._api_versions = None self._operations = None self._resources = None self._scope = None self.discriminator = None if api_groups is not None: self.api_groups = api_groups if api_versions is not None: self.api_versions = api_versions if operations is not None: self.operations = operations if resources is not None: self.resources = resources if scope is not None: self.scope = scope @property def api_groups(self): """Gets the api_groups of this V1RuleWithOperations. # noqa: E501 APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required. # noqa: E501 :return: The api_groups of this V1RuleWithOperations. # noqa: E501 :rtype: list[str] """ return self._api_groups @api_groups.setter def api_groups(self, api_groups): """Sets the api_groups of this V1RuleWithOperations. APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required. # noqa: E501 :param api_groups: The api_groups of this V1RuleWithOperations. # noqa: E501 :type: list[str] """ self._api_groups = api_groups @property def api_versions(self): """Gets the api_versions of this V1RuleWithOperations. # noqa: E501 APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required. # noqa: E501 :return: The api_versions of this V1RuleWithOperations. # noqa: E501 :rtype: list[str] """ return self._api_versions @api_versions.setter def api_versions(self, api_versions): """Sets the api_versions of this V1RuleWithOperations. APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required. # noqa: E501 :param api_versions: The api_versions of this V1RuleWithOperations. # noqa: E501 :type: list[str] """ self._api_versions = api_versions @property def operations(self): """Gets the operations of this V1RuleWithOperations. 
# noqa: E501 Operations is the operations the admission hook cares about - CREATE, UPDATE, DELETE, CONNECT or * for all of those operations and any future admission operations that are added. If '*' is present, the length of the slice must be one. Required. # noqa: E501 :return: The operations of this V1RuleWithOperations. # noqa: E501 :rtype: list[str] """ return self._operations @operations.setter def operations(self, operations): """Sets the operations of this V1RuleWithOperations. Operations is the operations the admission hook cares about - CREATE, UPDATE, DELETE, CONNECT or * for all of those operations and any future admission operations that are added. If '*' is present, the length of the slice must be one. Required. # noqa: E501 :param operations: The operations of this V1RuleWithOperations. # noqa: E501 :type: list[str] """ self._operations = operations @property def resources(self): """Gets the resources of this V1RuleWithOperations. # noqa: E501 Resources is a list of resources this rule applies to. For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources. If wildcard is present, the validation rule will ensure resources do not overlap with each other. Depending on the enclosing object, subresources might not be allowed. Required. # noqa: E501 :return: The resources of this V1RuleWithOperations. # noqa: E501 :rtype: list[str] """ return self._resources @resources.setter def resources(self, resources): """Sets the resources of this V1RuleWithOperations. Resources is a list of resources this rule applies to. For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources. If wildcard is present, the validation rule will ensure resources do not overlap with each other. Depending on the enclosing object, subresources might not be allowed. Required. # noqa: E501 :param resources: The resources of this V1RuleWithOperations. # noqa: E501 :type: list[str] """ self._resources = resources @property def scope(self): """Gets the scope of this V1RuleWithOperations. # noqa: E501 scope specifies the scope of this rule. Valid values are \"Cluster\", \"Namespaced\", and \"*\" \"Cluster\" means that only cluster-scoped resources will match this rule. Namespace API objects are cluster-scoped. \"Namespaced\" means that only namespaced resources will match this rule. \"*\" means that there are no scope restrictions. Subresources match the scope of their parent resource. Default is \"*\". # noqa: E501 :return: The scope of this V1RuleWithOperations. # noqa: E501 :rtype: str """ return self._scope @scope.setter def scope(self, scope): """Sets the scope of this V1RuleWithOperations. scope specifies the scope of this rule. Valid values are \"Cluster\", \"Namespaced\", and \"*\" \"Cluster\" means that only cluster-scoped resources will match this rule. Namespace API objects are cluster-scoped. \"Namespaced\" means that only namespaced resources will match this rule. \"*\" means that there are no scope restrictions. Subresources match the scope of their parent resource. Default is \"*\". # noqa: E501 :param scope: The scope of this V1RuleWithOperations. 
# noqa: E501 :type: str """ self._scope = scope def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, V1RuleWithOperations): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, V1RuleWithOperations): return True return self.to_dict() != other.to_dict()<|fim▁end|>
import six from kubernetes.client.configuration import Configuration
<|file_name|>subscribe_group.py<|end_file_name|><|fim▁begin|>## www.pubnub.com - PubNub Real-time push service in the cloud. # coding=utf8 ## PubNub Real-time Push APIs and Notifications Framework ## Copyright (c) 2010 Stephen Blum ## http://www.pubnub.com/ import sys from pubnub import Pubnub as Pubnub publish_key = len(sys.argv) > 1 and sys.argv[1] or 'demo' subscribe_key = len(sys.argv) > 2 and sys.argv[2] or 'demo' secret_key = len(sys.argv) > 3 and sys.argv[3] or 'demo' cipher_key = len(sys.argv) > 4 and sys.argv[4] or 'abcd' ssl_on = len(sys.argv) > 5 and bool(sys.argv[5]) or False ## ----------------------------------------------------------------------- ## Initiate Pubnub State ## ----------------------------------------------------------------------- pubnub = Pubnub(publish_key=publish_key, subscribe_key=subscribe_key, secret_key=secret_key, cipher_key=cipher_key, ssl_on=ssl_on) channel = 'ab' # Asynchronous usage def callback_abc(message, channel, real_channel): print(str(message) + ' , ' + channel + ', ' + real_channel) pubnub.unsubscribe_group(channel_group='abc') #pubnub.stop()<|fim▁hole|> def error(message): print("ERROR : " + str(message)) def connect_abc(message): print("CONNECTED " + str(message)) def connect_d(message): print("CONNECTED " + str(message)) pubnub.unsubscribe(channel='d') def reconnect(message): print("RECONNECTED " + str(message)) def disconnect(message): print("DISCONNECTED " + str(message)) print pubnub.channel_group_add_channel(channel_group='abc', channel="b") pubnub.subscribe_group(channel_groups='abc', callback=callback_abc, error=error, connect=connect_abc, reconnect=reconnect, disconnect=disconnect) pubnub.subscribe(channels='d', callback=callback_d, error=error, connect=connect_d, reconnect=reconnect, disconnect=disconnect) pubnub.start()<|fim▁end|>
def callback_d(message, channel): print(str(message) + ' , ' + channel)
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # This file is part of Shoop. #<|fim▁hole|>from shoop.api.factories import viewset_factory from shoop.core.api.orders import OrderViewSet from shoop.core.api.products import ProductViewSet, ShopProductViewSet from shoop.core.models import Contact, Shop from shoop.core.models.categories import Category def populate_core_api(router): """ :param router: Router :type router: rest_framework.routers.DefaultRouter """ router.register("shoop/category", viewset_factory(Category)) router.register("shoop/contact", viewset_factory(Contact)) router.register("shoop/order", OrderViewSet) router.register("shoop/product", ProductViewSet) router.register("shoop/shop", viewset_factory(Shop)) router.register("shoop/shop_product", ShopProductViewSet)<|fim▁end|>
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree.
<|file_name|>test_client_websocket_35.py<|end_file_name|><|fim▁begin|>import pytest import aiohttp from aiohttp import web @pytest.mark.run_loop async def test_client_ws_async_for(loop, create_server):<|fim▁hole|> items = ['q1', 'q2', 'q3'] async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) for i in items: ws.send_str(i) await ws.close() return ws app, url = await create_server(proto='ws') app.router.add_route('GET', '/', handler) resp = await aiohttp.ws_connect(url, loop=loop) it = iter(items) async for msg in resp: assert msg.data == next(it) with pytest.raises(StopIteration): next(it) assert resp.closed @pytest.mark.run_loop async def test_client_ws_async_with(loop, create_app_and_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive() ws.send_str(msg.data + '/answer') await ws.close() return ws app, client = await create_app_and_client( server_params=dict(proto='ws')) app.router.add_route('GET', '/', handler) async with client.ws_connect('/') as ws: ws.send_str('request') msg = await ws.receive() assert msg.data == 'request/answer' assert ws.closed @pytest.mark.run_loop async def test_client_ws_async_with_shortcut(loop, create_server): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive() ws.send_str(msg.data + '/answer') await ws.close() return ws app, url = await create_server(proto='ws') app.router.add_route('GET', '/', handler) async with aiohttp.ws_connect(url, loop=loop) as ws: ws.send_str('request') msg = await ws.receive() assert msg.data == 'request/answer' assert ws.closed<|fim▁end|>
<|file_name|>UserSettingsEditComponent.ts<|end_file_name|><|fim▁begin|>import {Input, Component} from 'angular2/core'; import {Router, RouterLink, CanActivate} from 'angular2/router'; import {UserSettingsComponent} from '../userSettings/UserSettingsComponent'; import {UserSettingsService} from '../../shared/services/UserSettingsService'; import {AuthService} from '../../shared/services/AuthService'; import {AlertingService} from '../../shared/services/AlertingService'; <|fim▁hole|> PointerSize, PointerColor, BackgroundColor} from '../../shared/enums/UserSettingsEnums'; import {TranslatePipe} from 'ng2-translate/ng2-translate'; import {appInjector} from '../../../appInjector'; @Component({ directives: [RouterLink, UserSettingsComponent], templateUrl: './app/components/userSettingsEdit/userSettingsEdit.html', pipes: [TranslatePipe] }) @CanActivate( (nextInstr: any, currInstr: any) => { let injector: any = appInjector(); let authService: AuthService = injector.get(AuthService); let router: Router = injector.get(Router); let isLogged = authService.isLogged(); if (!isLogged) { router.navigate(['/Login']); } return isLogged; } ) export class UserSettingsEditComponent { public userName: string; public userSettings: UserSettings; public userSettingsForJar: string; constructor( private alertingService: AlertingService, private authService: AuthService, private userSettingsService: UserSettingsService) { this.userName = authService.getLoggedUser(); this.userSettingsService.getUserSettingsFor(this.userName) .subscribe(data => this.userSettings = data); this.userSettingsService.getUserSettingsForJar(this.userName) .subscribe(data => this.userSettingsForJar = data); } saveUserSettings(): void { this.userSettingsService.saveUserSettingsForUser(this.userName, this.userSettings) .subscribe(data => { this.alertingService.addSuccess('SAVE_USER_SETTINGS_SUCCESS_MESSAGE'); }, err => { this.alertingService.addDanger('SAVE_USER_SETTINGS_ERROR_MESSAGE'); }); } }<|fim▁end|>
import {UserSettings} from '../../shared/models/UserSettings'; import { PointerType,
<|file_name|>network_test.py<|end_file_name|><|fim▁begin|># ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2017, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import json import unittest try: # NOTE need to import capnp first to activate the magic necessary for # PythonDummyRegion_capnp, etc. import capnp except ImportError: capnp = None else: from nupic.proto.NetworkProto_capnp import NetworkProto import nupic.bindings.engine_internal as engine from nupic.bindings.tools.serialization_test_py_region import \ SerializationTestPyRegion class NetworkTest(unittest.TestCase): @unittest.skipUnless( capnp, "pycapnp is not installed, skipping serialization test.") def testCapnpSerializationWithPyRegion(self): """Test capnp (de)serialization of network containing a python region""" engine.Network.registerPyRegion(__name__, SerializationTestPyRegion.__name__) try: srcNet = engine.Network() srcNet.addRegion(SerializationTestPyRegion.__name__, "py." + SerializationTestPyRegion.__name__, json.dumps({ "dataWidth": 128, "randomSeed": 99, })) # Serialize builderProto = NetworkProto.new_message() srcNet.write(builderProto) # Construct NetworkProto reader from populated builder readerProto = NetworkProto.from_bytes(builderProto.to_bytes()) # Deserialize destNet = engine.Network.read(readerProto) destRegion = destNet.getRegions().getByName( SerializationTestPyRegion.__name__) self.assertEqual(destRegion.getParameterUInt32("dataWidth"), 128) self.assertEqual(destRegion.getParameterUInt32("randomSeed"), 99) <|fim▁hole|> finally: engine.Network.unregisterPyRegion(SerializationTestPyRegion.__name__) def testSimpleTwoRegionNetworkIntrospection(self): # Create Network instance network = engine.Network() # Add two TestNode regions to network network.addRegion("region1", "TestNode", "") network.addRegion("region2", "TestNode", "") # Set dimensions on first region region1 = network.getRegions().getByName("region1") region1.setDimensions(engine.Dimensions([1, 1])) # Link region1 and region2 network.link("region1", "region2", "UniformLink", "") # Initialize network network.initialize() for linkName, link in network.getLinks(): # Compare Link API to what we know about the network self.assertEqual(link.toString(), linkName) self.assertEqual(link.getDestRegionName(), "region2") self.assertEqual(link.getSrcRegionName(), "region1") self.assertEqual(link.getLinkType(), "UniformLink") self.assertEqual(link.getDestInputName(), "bottomUpIn") self.assertEqual(link.getSrcOutputName(), "bottomUpOut") break else: self.fail("Unable to iterate network links.")<|fim▁end|>
<|file_name|>test_print.py<|end_file_name|><|fim▁begin|>from .. utils import TranspileTestCase, BuiltinFunctionTestCase class PrintTests(TranspileTestCase): def test_fileobj(self): self.assertCodeExecution(""" class FileLikeObject: def __init__(self): self.buffer = '' def write(self, content): self.buffer = self.buffer + (content * 2) out = FileLikeObject() print('hello', 'world', file=out) print('goodbye', 'world', file=out) print() """) def test_sep(self): self.assertCodeExecution(""" print('hello world', 'goodbye world', sep='-') print() """) def test_end(self): self.assertCodeExecution(""" print('hello world', 'goodbye world', end='-') print() """)<|fim▁hole|> print('hello world', 'goodbye world', flush=True) print() """) def test_combined(self): self.assertCodeExecution(""" class FileLikeObject: def __init__(self): self.buffer = '' def write(self, content): self.buffer = self.buffer + (content * 2) def flush(self): self.buffer = self.buffer + '<<<' out = FileLikeObject() print('hello', 'world', sep='*', end='-', file=out, flush=True) print('goodbye', 'world', file=out, sep='-', end='*') print() """) class BuiltinPrintFunctionTests(BuiltinFunctionTestCase, TranspileTestCase): functions = ["print"] not_implemented = [ 'test_class', 'test_frozenset', 'test_slice', ]<|fim▁end|>
def test_flush(self): self.assertCodeExecution("""
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import clearSuggestedActions from './actions/clearSuggestedActions'; import connect from './actions/connect'; import createStore, { withDevTools as createStoreWithDevTools } from './createStore'; import disconnect from './actions/disconnect'; import dismissNotification from './actions/dismissNotification'; import emitTypingIndicator from './actions/emitTypingIndicator'; import markActivity from './actions/markActivity';<|fim▁hole|>import sendFiles from './actions/sendFiles'; import sendMessage from './actions/sendMessage'; import sendMessageBack from './actions/sendMessageBack'; import sendPostBack from './actions/sendPostBack'; import setDictateInterims from './actions/setDictateInterims'; import setDictateState from './actions/setDictateState'; import setLanguage from './actions/setLanguage'; import setNotification from './actions/setNotification'; import setSendBox from './actions/setSendBox'; import setSendTimeout from './actions/setSendTimeout'; import setSendTypingIndicator from './actions/setSendTypingIndicator'; import startDictate from './actions/startDictate'; import startSpeakingActivity from './actions/startSpeakingActivity'; import stopDictate from './actions/stopDictate'; import stopSpeakingActivity from './actions/stopSpeakingActivity'; import submitSendBox from './actions/submitSendBox'; import * as ActivityClientState from './constants/ActivityClientState'; import * as DictateState from './constants/DictateState'; const Constants = { ActivityClientState, DictateState }; const version = process.env.npm_package_version; export { clearSuggestedActions, connect, Constants, createStore, createStoreWithDevTools, disconnect, dismissNotification, emitTypingIndicator, markActivity, postActivity, sendEvent, sendFiles, sendMessage, sendMessageBack, sendPostBack, setDictateInterims, setDictateState, setLanguage, setNotification, setSendBox, setSendTimeout, setSendTypingIndicator, startDictate, startSpeakingActivity, stopDictate, stopSpeakingActivity, submitSendBox, version };<|fim▁end|>
import postActivity from './actions/postActivity'; import sendEvent from './actions/sendEvent';
<|file_name|>rolebinding.go<|end_file_name|><|fim▁begin|>/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by applyconfiguration-gen. DO NOT EDIT. package v1 import ( apirbacv1 "k8s.io/api/rbac/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" managedfields "k8s.io/apimachinery/pkg/util/managedfields" internal "k8s.io/client-go/applyconfigurations/internal" v1 "k8s.io/client-go/applyconfigurations/meta/v1" ) // RoleBindingApplyConfiguration represents an declarative configuration of the RoleBinding type for use // with apply. type RoleBindingApplyConfiguration struct { v1.TypeMetaApplyConfiguration `json:",inline"` *v1.ObjectMetaApplyConfiguration `json:"metadata,omitempty"` Subjects []SubjectApplyConfiguration `json:"subjects,omitempty"` RoleRef *RoleRefApplyConfiguration `json:"roleRef,omitempty"` } // RoleBinding constructs an declarative configuration of the RoleBinding type for use with // apply. func RoleBinding(name, namespace string) *RoleBindingApplyConfiguration { b := &RoleBindingApplyConfiguration{} b.WithName(name) b.WithNamespace(namespace) b.WithKind("RoleBinding") b.WithAPIVersion("rbac.authorization.k8s.io/v1") return b } // ExtractRoleBinding extracts the applied configuration owned by fieldManager from // roleBinding. If no managedFields are found in roleBinding for fieldManager, a // RoleBindingApplyConfiguration is returned with only the Name, Namespace (if applicable), // APIVersion and Kind populated. It is possible that no managed fields were found for because other // field managers have taken ownership of all the fields previously owned by fieldManager, or because // the fieldManager never owned fields any fields. // roleBinding must be a unmodified RoleBinding API object that was retrieved from the Kubernetes API. // ExtractRoleBinding provides a way to perform a extract/modify-in-place/apply workflow. // Note that an extracted apply configuration will contain fewer fields than what the fieldManager previously // applied if another fieldManager has updated or force applied any of the previously applied fields. // Experimental! func ExtractRoleBinding(roleBinding *apirbacv1.RoleBinding, fieldManager string) (*RoleBindingApplyConfiguration, error) { return extractRoleBinding(roleBinding, fieldManager, "") } // ExtractRoleBindingStatus is the same as ExtractRoleBinding except // that it extracts the status subresource applied configuration. // Experimental! 
func ExtractRoleBindingStatus(roleBinding *apirbacv1.RoleBinding, fieldManager string) (*RoleBindingApplyConfiguration, error) { return extractRoleBinding(roleBinding, fieldManager, "status") } func extractRoleBinding(roleBinding *apirbacv1.RoleBinding, fieldManager string, subresource string) (*RoleBindingApplyConfiguration, error) { b := &RoleBindingApplyConfiguration{} err := managedfields.ExtractInto(roleBinding, internal.Parser().Type("io.k8s.api.rbac.v1.RoleBinding"), fieldManager, b, subresource) if err != nil { return nil, err } b.WithName(roleBinding.Name) b.WithNamespace(roleBinding.Namespace) b.WithKind("RoleBinding") b.WithAPIVersion("rbac.authorization.k8s.io/v1") return b, nil } // WithKind sets the Kind field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the Kind field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithKind(value string) *RoleBindingApplyConfiguration { b.Kind = &value return b } // WithAPIVersion sets the APIVersion field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the APIVersion field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithAPIVersion(value string) *RoleBindingApplyConfiguration { b.APIVersion = &value return b } // WithName sets the Name field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the Name field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithName(value string) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.Name = &value return b } // WithGenerateName sets the GenerateName field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the GenerateName field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithGenerateName(value string) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.GenerateName = &value return b } <|fim▁hole|>// and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the Namespace field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithNamespace(value string) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.Namespace = &value return b } // WithSelfLink sets the SelfLink field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the SelfLink field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithSelfLink(value string) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.SelfLink = &value return b } // WithUID sets the UID field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the UID field is set to the value of the last call. 
func (b *RoleBindingApplyConfiguration) WithUID(value types.UID) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.UID = &value return b } // WithResourceVersion sets the ResourceVersion field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the ResourceVersion field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithResourceVersion(value string) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.ResourceVersion = &value return b } // WithGeneration sets the Generation field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the Generation field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithGeneration(value int64) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.Generation = &value return b } // WithCreationTimestamp sets the CreationTimestamp field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the CreationTimestamp field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithCreationTimestamp(value metav1.Time) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.CreationTimestamp = &value return b } // WithDeletionTimestamp sets the DeletionTimestamp field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the DeletionTimestamp field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithDeletionTimestamp(value metav1.Time) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.DeletionTimestamp = &value return b } // WithDeletionGracePeriodSeconds sets the DeletionGracePeriodSeconds field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the DeletionGracePeriodSeconds field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithDeletionGracePeriodSeconds(value int64) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.DeletionGracePeriodSeconds = &value return b } // WithLabels puts the entries into the Labels field in the declarative configuration // and returns the receiver, so that objects can be build by chaining "With" function invocations. // If called multiple times, the entries provided by each call will be put on the Labels field, // overwriting an existing map entries in Labels field with the same key. func (b *RoleBindingApplyConfiguration) WithLabels(entries map[string]string) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() if b.Labels == nil && len(entries) > 0 { b.Labels = make(map[string]string, len(entries)) } for k, v := range entries { b.Labels[k] = v } return b } // WithAnnotations puts the entries into the Annotations field in the declarative configuration // and returns the receiver, so that objects can be build by chaining "With" function invocations. 
// If called multiple times, the entries provided by each call will be put on the Annotations field, // overwriting an existing map entries in Annotations field with the same key. func (b *RoleBindingApplyConfiguration) WithAnnotations(entries map[string]string) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() if b.Annotations == nil && len(entries) > 0 { b.Annotations = make(map[string]string, len(entries)) } for k, v := range entries { b.Annotations[k] = v } return b } // WithOwnerReferences adds the given value to the OwnerReferences field in the declarative configuration // and returns the receiver, so that objects can be build by chaining "With" function invocations. // If called multiple times, values provided by each call will be appended to the OwnerReferences field. func (b *RoleBindingApplyConfiguration) WithOwnerReferences(values ...*v1.OwnerReferenceApplyConfiguration) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() for i := range values { if values[i] == nil { panic("nil value passed to WithOwnerReferences") } b.OwnerReferences = append(b.OwnerReferences, *values[i]) } return b } // WithFinalizers adds the given value to the Finalizers field in the declarative configuration // and returns the receiver, so that objects can be build by chaining "With" function invocations. // If called multiple times, values provided by each call will be appended to the Finalizers field. func (b *RoleBindingApplyConfiguration) WithFinalizers(values ...string) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() for i := range values { b.Finalizers = append(b.Finalizers, values[i]) } return b } // WithClusterName sets the ClusterName field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the ClusterName field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithClusterName(value string) *RoleBindingApplyConfiguration { b.ensureObjectMetaApplyConfigurationExists() b.ClusterName = &value return b } func (b *RoleBindingApplyConfiguration) ensureObjectMetaApplyConfigurationExists() { if b.ObjectMetaApplyConfiguration == nil { b.ObjectMetaApplyConfiguration = &v1.ObjectMetaApplyConfiguration{} } } // WithSubjects adds the given value to the Subjects field in the declarative configuration // and returns the receiver, so that objects can be build by chaining "With" function invocations. // If called multiple times, values provided by each call will be appended to the Subjects field. func (b *RoleBindingApplyConfiguration) WithSubjects(values ...*SubjectApplyConfiguration) *RoleBindingApplyConfiguration { for i := range values { if values[i] == nil { panic("nil value passed to WithSubjects") } b.Subjects = append(b.Subjects, *values[i]) } return b } // WithRoleRef sets the RoleRef field in the declarative configuration to the given value // and returns the receiver, so that objects can be built by chaining "With" function invocations. // If called multiple times, the RoleRef field is set to the value of the last call. func (b *RoleBindingApplyConfiguration) WithRoleRef(value *RoleRefApplyConfiguration) *RoleBindingApplyConfiguration { b.RoleRef = value return b }<|fim▁end|>
// WithNamespace sets the Namespace field in the declarative configuration to the given value
<|file_name|>classCqrs_1_1MongoDB_1_1Serialisers_1_1TypeSerialiser.js<|end_file_name|><|fim▁begin|>var classCqrs_1_1MongoDB_1_1Serialisers_1_1TypeSerialiser = [<|fim▁hole|>];<|fim▁end|>
[ "Deserialize", "classCqrs_1_1MongoDB_1_1Serialisers_1_1TypeSerialiser_a5e8aa7ae1372033da215d02b79947b20.html#a5e8aa7ae1372033da215d02b79947b20", null ], [ "Serialize", "classCqrs_1_1MongoDB_1_1Serialisers_1_1TypeSerialiser_a4aec60f5df74f482b576f4e0dad0d5f6.html#a4aec60f5df74f482b576f4e0dad0d5f6", null ], [ "Serialize", "classCqrs_1_1MongoDB_1_1Serialisers_1_1TypeSerialiser_a2362ae784859054bf5b9281dafeb37cd.html#a2362ae784859054bf5b9281dafeb37cd", null ], [ "ValueType", "classCqrs_1_1MongoDB_1_1Serialisers_1_1TypeSerialiser_af5d06e2fe995f816c840a8ceefd22991.html#af5d06e2fe995f816c840a8ceefd22991", null ]
<|file_name|>package.go<|end_file_name|><|fim▁begin|>/* Copyright IBM Corp. 2016 All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package golang import ( "archive/tar" "errors" "fmt" "bytes" "encoding/hex" "io/ioutil" "os" "os/exec" "path/filepath" "strings" "time" "github.com/golang/protobuf/proto" "github.com/hyperledger/fabric/common/flogging" "github.com/hyperledger/fabric/common/util" ccutil "github.com/hyperledger/fabric/core/chaincode/platforms/util" cutil "github.com/hyperledger/fabric/core/container/util" pb "github.com/hyperledger/fabric/protos/peer" "github.com/spf13/viper" ) var includeFileTypes = map[string]bool{ ".c": true, ".h": true, ".go": true, ".yaml": true, ".json": true, } var logger = flogging.MustGetLogger("golang-platform") func getCodeFromHTTP(path string) (codegopath string, err error) { codegopath = "" err = nil logger.Debugf("getCodeFromHTTP %s", path) // The following could be done with os.Getenv("GOPATH") but we need to change it later so this prepares for that next step env := os.Environ() var origgopath string var gopathenvIndex int for i, v := range env { if strings.Index(v, "GOPATH=") == 0 { p := strings.SplitAfter(v, "GOPATH=") origgopath = p[1] gopathenvIndex = i break } } if origgopath == "" { err = errors.New("GOPATH not defined") return } // Only take the first element of GOPATH gopath := filepath.SplitList(origgopath)[0] // Define a new gopath in which to download the code newgopath := filepath.Join(gopath, "_usercode_") //ignore errors.. _usercode_ might exist. TempDir will catch any other errors os.Mkdir(newgopath, 0755) if codegopath, err = ioutil.TempDir(newgopath, ""); err != nil { err = fmt.Errorf("could not create tmp dir under %s(%s)", newgopath, err) return } //go paths can have multiple dirs. We create a GOPATH with two source tree's as follows // // <temporary empty folder to download chaincode source> : <local go path with OBC source> // //This approach has several goodness: // . Go will pick the first path to download user code (which we will delete after processing) // . GO will not download OBC as it is in the second path. GO will use the local OBC for generating chaincode image // . network savings // . more secure // . 
as we are not downloading OBC, private, password-protected OBC repo's become non-issue env[gopathenvIndex] = "GOPATH=" + codegopath + string(os.PathListSeparator) + origgopath // Use a 'go get' command to pull the chaincode from the given repo logger.Debugf("go get %s", path) cmd := exec.Command("go", "get", path) cmd.Env = env var out bytes.Buffer cmd.Stdout = &out var errBuf bytes.Buffer cmd.Stderr = &errBuf //capture Stderr and print it on error err = cmd.Start() // Create a go routine that will wait for the command to finish done := make(chan error, 1) go func() { done <- cmd.Wait() }() select { case <-time.After(time.Duration(viper.GetInt("chaincode.deploytimeout")) * time.Millisecond): // If pulling repos takes too long, we should give up // (This can happen if a repo is private and the git clone asks for credentials) if err = cmd.Process.Kill(); err != nil { err = fmt.Errorf("failed to kill: %s", err) } else { err = errors.New("Getting chaincode took too long") } case err = <-done: // If we're here, the 'go get' command must have finished if err != nil { err = fmt.Errorf("'go get' failed with error: \"%s\"\n%s", err, string(errBuf.Bytes())) } } return } func getCodeFromFS(path string) (codegopath string, err error) { logger.Debugf("getCodeFromFS %s", path) gopath := os.Getenv("GOPATH") if gopath == "" { err = errors.New("GOPATH not defined") return } // Only take the first element of GOPATH codegopath = filepath.SplitList(gopath)[0] return } //collectChaincodeFiles collects chaincode files and generates hashcode for the //package. If path is a HTTP(s) url it downloads the code first. //NOTE: for dev mode, user builds and runs chaincode manually. The name provided //by the user is equivalent to the path. This method will treat the name //as codebytes and compute the hash from it. 
ie, user cannot run the chaincode //with the same (name, input, args) func collectChaincodeFiles(spec *pb.ChaincodeSpec, tw *tar.Writer) (string, error) { if spec == nil { return "", errors.New("Cannot collect files from nil spec") } chaincodeID := spec.ChaincodeId if chaincodeID == nil || chaincodeID.Path == "" { return "", errors.New("Cannot collect files from empty chaincode path") } //install will not have inputs and we don't have to collect hash for it var inputbytes []byte var err error if spec.Input == nil || len(spec.Input.Args) == 0 { logger.Debugf("not using input for hash computation for %v ", chaincodeID) } else { inputbytes, err = proto.Marshal(spec.Input) if err != nil { return "", fmt.Errorf("Error marshalling constructor: %s", err) } } //code root will point to the directory where the code exists //in the case of http it will be a temporary dir that //will have to be deleted var codegopath string var ishttp bool defer func() { if ishttp && codegopath != "" { os.RemoveAll(codegopath) } }() path := chaincodeID.Path var actualcodepath string if strings.HasPrefix(path, "http://") { ishttp = true actualcodepath = path[7:] codegopath, err = getCodeFromHTTP(actualcodepath) } else if strings.HasPrefix(path, "https://") { ishttp = true actualcodepath = path[8:] codegopath, err = getCodeFromHTTP(actualcodepath) } else { actualcodepath = path codegopath, err = getCodeFromFS(path) } if err != nil { return "", fmt.Errorf("Error getting code %s", err) } tmppath := filepath.Join(codegopath, "src", actualcodepath) if err = ccutil.IsCodeExist(tmppath); err != nil { return "", fmt.Errorf("code does not exist %s", err) } hash := []byte{} if inputbytes != nil { hash = util.GenerateHashFromSignature(actualcodepath, inputbytes) } hash, err = ccutil.HashFilesInDir(filepath.Join(codegopath, "src"), actualcodepath, hash, tw) if err != nil { return "", fmt.Errorf("Could not get hashcode for %s - %s\n", path, err) } return hex.EncodeToString(hash[:]), nil } //WriteGopathSrc tars up files under gopath src func writeGopathSrc(tw *tar.Writer, excludeDir string) error { gopath := os.Getenv("GOPATH") // Only take the first element of GOPATH gopath = filepath.SplitList(gopath)[0] rootDirectory := filepath.Join(gopath, "src") logger.Infof("rootDirectory = %s", rootDirectory) if err := cutil.WriteFolderToTarPackage(tw, rootDirectory, excludeDir, includeFileTypes, nil); err != nil { logger.Errorf("Error writing folder to tar package %s", err) return err } // Write the tar file out if err := tw.Close(); err != nil { return err<|fim▁hole|> } //ioutil.WriteFile("/tmp/chaincode_deployment.tar", inputbuf.Bytes(), 0644) return nil } //tw is expected to have the chaincode in it from GenerateHashcode. This method //will just package rest of the bytes func writeChaincodePackage(spec *pb.ChaincodeSpec, tw *tar.Writer) error { urlLocation, err := decodeUrl(spec) if err != nil { return fmt.Errorf("could not decode url: %s", err) } err = writeGopathSrc(tw, urlLocation) if err != nil { return fmt.Errorf("Error writing Chaincode package contents: %s", err) } return nil }<|fim▁end|>
<|file_name|>test_floatingips.py<|end_file_name|><|fim▁begin|># Copyright 2015 Hewlett-Packard Development Company, L.P # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack_dashboard.test.integration_tests import helpers from openstack_dashboard.test.integration_tests.regions import messages class TestFloatingip(helpers.TestCase): """Checks that the user is able to allocate/release floatingip.""" def test_floatingip(self): floatingip_page = \ self.home_pg.go_to_compute_accessandsecurity_floatingipspage() floating_ip = floatingip_page.allocate_floatingip() self.assertTrue( floatingip_page.find_message_and_dismiss(messages.SUCCESS)) self.assertFalse( floatingip_page.find_message_and_dismiss(messages.ERROR)) self.assertTrue(floatingip_page.is_floatingip_present(floating_ip)) floatingip_page.release_floatingip(floating_ip) self.assertTrue( floatingip_page.find_message_and_dismiss(messages.SUCCESS)) self.assertFalse( floatingip_page.find_message_and_dismiss(messages.ERROR)) self.assertFalse(floatingip_page.is_floatingip_present(floating_ip)) class TestFloatingipAssociateDisassociate(helpers.TestCase): """Checks that the user is able to Associate/Disassociate floatingip.""" def test_floatingip_associate_disassociate(self): instance_name = helpers.gen_random_resource_name('instance', timestamp=False) instances_page = self.home_pg.go_to_compute_instancespage() instances_page.create_instance(instance_name) self.assertTrue( instances_page.find_message_and_dismiss(messages.SUCCESS)) self.assertFalse( instances_page.find_message_and_dismiss(messages.ERROR)) self.assertTrue(instances_page.is_instance_active(instance_name)) instance_ipv4 = instances_page.get_fixed_ipv4(instance_name) instance_info = "{} {}".format(instance_name, instance_ipv4) floatingip_page = \ self.home_pg.go_to_compute_accessandsecurity_floatingipspage() floating_ip = floatingip_page.allocate_floatingip() self.assertTrue( floatingip_page.find_message_and_dismiss(messages.SUCCESS)) self.assertFalse( floatingip_page.find_message_and_dismiss(messages.ERROR)) self.assertTrue(floatingip_page.is_floatingip_present(floating_ip)) self.assertEqual('-', floatingip_page.get_fixed_ip(floating_ip)) floatingip_page.associate_floatingip(floating_ip, instance_name, instance_ipv4) self.assertTrue( floatingip_page.find_message_and_dismiss(messages.SUCCESS)) self.assertFalse( floatingip_page.find_message_and_dismiss(messages.ERROR)) self.assertEqual(instance_info, floatingip_page.get_fixed_ip(floating_ip))<|fim▁hole|> floatingip_page.disassociate_floatingip(floating_ip) self.assertTrue( floatingip_page.find_message_and_dismiss(messages.SUCCESS)) self.assertFalse( floatingip_page.find_message_and_dismiss(messages.ERROR)) self.assertEqual('-', floatingip_page.get_fixed_ip(floating_ip)) floatingip_page.release_floatingip(floating_ip) self.assertTrue( floatingip_page.find_message_and_dismiss(messages.SUCCESS)) self.assertFalse( floatingip_page.find_message_and_dismiss(messages.ERROR)) 
self.assertFalse(floatingip_page.is_floatingip_present(floating_ip)) instances_page = self.home_pg.go_to_compute_instancespage() instances_page.delete_instance(instance_name) self.assertTrue( instances_page.find_message_and_dismiss(messages.SUCCESS)) self.assertFalse( instances_page.find_message_and_dismiss(messages.ERROR)) self.assertTrue(instances_page.is_instance_deleted(instance_name))<|fim▁end|>
<|file_name|>io.rs<|end_file_name|><|fim▁begin|>/*! This module provides some miscellaneous IO support routines. */ use std::io::{IoError, IoResult, OtherIoError}; <|fim▁hole|>/** Reads a line of input from the given `Reader`. This does not require a push-back buffer. It returns the line *with* the line terminator. Note that this function *does not* support old-school Mac OS newlines (i.e. a single carriage return). If it encounters a carriage return which is *not* immediately followed by a line feed, the carriage return will be included as part of the line. */ pub fn read_line<R: Reader>(r: &mut R) -> IoResult<String> { let mut line = String::new(); loop { match read_utf8_char(r) { Ok('\n') => { line.push('\n'); break; }, Ok(c) => { line.push(c); } Err(err) => { if err.kind == ::std::io::EndOfFile && line.len() > 0 { break } else { return Err(err) } } } } Ok(line) } #[test] fn test_read_line() { use std::borrow::ToOwned; let s = "line one\nline two\r\nline three\n"; let mut r = ::std::io::BufReader::new(s.as_bytes()); let oks = |s:&str| Ok(s.to_owned()); assert_eq!(read_line(&mut r), oks("line one\n")); assert_eq!(read_line(&mut r), oks("line two\r\n")); assert_eq!(read_line(&mut r), oks("line three\n")); } /** Reads a single UTF-8 encoded Unicode code point from a `Reader`. */ pub fn read_utf8_char<R: Reader>(r: &mut R) -> IoResult<char> { fn invalid_utf8<T>(b: u8, initial: bool) -> IoResult<T> { Err(IoError { kind: OtherIoError, desc: "invalid utf-8 sequence", detail: if initial { Some(format!("invalid initial code unit {:#02x}", b)) } else { Some(format!("invalid continuation code unit {:#02x}", b)) } }) } fn invalid_cp<T>(cp: u32) -> IoResult<T> { Err(IoError { kind: OtherIoError, desc: "invalid Unicode code point", detail: Some(format!("invalid code point {:#08x}", cp)) }) } // Why not use std::str::utf8_char_width? We need to know the encoding to mask away the size bits anyway. let (mut cp, n) = match try!(r.read_u8()) { b @ 0b0000_0000 ... 0b0111_1111 => (b as u32, 0), b @ 0b1100_0000 ... 0b1101_1111 => ((b & 0b0001_1111) as u32, 1), b @ 0b1110_0000 ... 0b1110_1111 => ((b & 0b0000_1111) as u32, 2), b @ 0b1111_0000 ... 0b1111_0111 => ((b & 0b0000_0111) as u32, 3), b @ 0b1111_1000 ... 0b1111_1011 => ((b & 0b0000_0011) as u32, 4), b @ 0b1111_1100 ... 0b1111_1101 => ((b & 0b0000_0001) as u32, 5), b => return invalid_utf8(b, true) }; for _ in range(0u, n) { let b = match try!(r.read_u8()) { b @ 0b10_000000 ... 0b10_111111 => (b & 0b00_111111) as u32, b => return invalid_utf8(b, false) }; cp = (cp << 6) | b; } ::std::char::from_u32(cp) .map(|c| Ok(c)) .unwrap_or_else(|| invalid_cp(cp)) } #[test] fn test_read_utf8_char() { fn test_str(s: &str) { let mut reader = ::std::io::BufReader::new(s.as_bytes()); for c in s.chars() { assert_eq!(Ok(c), read_utf8_char(&mut reader)) } } fn first(s: &[u8]) -> IoResult<char> { let mut reader = ::std::io::BufReader::new(s); read_utf8_char(&mut reader) } test_str("abcdef"); test_str("私の日本語わ下手ですよ!"); assert!(first(&[0b1000_0000u8]).is_err()); assert!(first(&[0b1100_0000u8, 0b0000_0000]).is_err()); } /** Reads a single line from standard input. */ pub fn stdin_read_line() -> IoResult<String> { read_line(&mut ::std::io::stdio::stdin_raw()) }<|fim▁end|>
<|file_name|>test_add_group.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import pytest
from model.group import Group<|fim▁hole|>
    fixture = Application()
    request.addfinalizer(fixture.destroy)
    return fixture


def test_add_group(app):
    app.session.login(username="admin", password="secret")
    app.group.create(Group(name="fgfg", header="fgfg", footer="fgfgfgfg"))
    app.session.logout()<|fim▁end|>
from fixture.application import Application


@pytest.fixture
def app(request):
<|file_name|>caches_test.go<|end_file_name|><|fim▁begin|>/* Real-time Online/Offline Charging System (OCS) for Telecom & ISP environments Copyright (C) ITsysCOM GmbH This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/> */ package dispatchers import ( "testing" "time" "github.com/cgrates/cgrates/config" "github.com/cgrates/cgrates/utils" "github.com/cgrates/ltcache" ) func TestDspCacheSv1PingError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.CGREvent{ Tenant: "tenant", ID: "", Time: &time.Time{}, Event: nil, APIOpts: nil, } var reply *string result := dspSrv.CacheSv1Ping(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1PingErrorArgs(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) var reply *string result := dspSrv.CacheSv1Ping(nil, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1PingErrorAttributeSConns(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.CGREvent{ Tenant: "tenant", ID: "", Time: &time.Time{}, Event: nil, APIOpts: nil, } var reply *string result := dspSrv.CacheSv1Ping(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1GetItemIDsError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.ArgsGetCacheItemIDsWithAPIOpts{} var reply *[]string result := dspSrv.CacheSv1GetItemIDs(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1GetItemIDsErrorArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.ArgsGetCacheItemIDsWithAPIOpts{ Tenant: "tenant", } var reply *[]string result := dspSrv.CacheSv1GetItemIDs(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1HasItemError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.ArgsGetCacheItemWithAPIOpts{} var reply *bool result 
:= dspSrv.CacheSv1HasItem(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1HasItemErrorArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.ArgsGetCacheItemWithAPIOpts{ Tenant: "tenant", } var reply *bool result := dspSrv.CacheSv1HasItem(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1GetItemExpiryTimeCacheSv1GetItemExpiryTimeError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.ArgsGetCacheItemWithAPIOpts{} var reply *time.Time result := dspSrv.CacheSv1GetItemExpiryTime(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1GetItemExpiryTimeCacheSv1GetItemExpiryTimeErrorArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.ArgsGetCacheItemWithAPIOpts{ Tenant: "tenant", } var reply *time.Time result := dspSrv.CacheSv1GetItemExpiryTime(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1RemoveItemError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.ArgsGetCacheItemWithAPIOpts{} var reply *string result := dspSrv.CacheSv1RemoveItem(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1RemoveItemArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.ArgsGetCacheItemWithAPIOpts{ Tenant: "tenant", } var reply *string result := dspSrv.CacheSv1RemoveItem(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1RemoveItemsError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.AttrReloadCacheWithAPIOpts{} var reply *string result := dspSrv.CacheSv1RemoveItems(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1RemoveItemsArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.AttrReloadCacheWithAPIOpts{ Tenant: "tenant", } var reply *string result := dspSrv.CacheSv1RemoveItems(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, 
\nReceived <%+v>", expected, result) } } func TestDspCacheSv1ClearError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.AttrCacheIDsWithAPIOpts{} var reply *string result := dspSrv.CacheSv1Clear(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1ClearArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.AttrCacheIDsWithAPIOpts{ Tenant: "tenant", } var reply *string result := dspSrv.CacheSv1Clear(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1GetCacheStatsError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.AttrCacheIDsWithAPIOpts{} var reply *map[string]*ltcache.CacheStats result := dspSrv.CacheSv1GetCacheStats(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1GetCacheStatsArgsNil(t *testing.T) {<|fim▁hole|> cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.AttrCacheIDsWithAPIOpts{ Tenant: "tenant", } var reply *map[string]*ltcache.CacheStats result := dspSrv.CacheSv1GetCacheStats(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1PrecacheStatusError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.AttrCacheIDsWithAPIOpts{} var reply *map[string]string result := dspSrv.CacheSv1PrecacheStatus(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1PrecacheStatusArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.AttrCacheIDsWithAPIOpts{ Tenant: "tenant", } var reply *map[string]string result := dspSrv.CacheSv1PrecacheStatus(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1HasGroupError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.ArgsGetGroupWithAPIOpts{} var reply *bool result := dspSrv.CacheSv1HasGroup(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1HasGroupArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, 
nil, nil) CGREvent := &utils.ArgsGetGroupWithAPIOpts{ Tenant: "tenant", } var reply *bool result := dspSrv.CacheSv1HasGroup(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1GetGroupItemIDsError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.ArgsGetGroupWithAPIOpts{} var reply *[]string result := dspSrv.CacheSv1GetGroupItemIDs(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1GetGroupItemIDsArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.ArgsGetGroupWithAPIOpts{ Tenant: "tenant", } var reply *[]string result := dspSrv.CacheSv1GetGroupItemIDs(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1RemoveGroupError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.ArgsGetGroupWithAPIOpts{} var reply *string result := dspSrv.CacheSv1RemoveGroup(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1RemoveGroupArgsNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.ArgsGetGroupWithAPIOpts{ Tenant: "tenant", } var reply *string result := dspSrv.CacheSv1RemoveGroup(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1ReloadCacheError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.AttrReloadCacheWithAPIOpts{} var reply *string result := dspSrv.CacheSv1ReloadCache(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1ReloadCacheNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.AttrReloadCacheWithAPIOpts{ Tenant: "tenant", } var reply *string result := dspSrv.CacheSv1ReloadCache(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1LoadCacheError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.AttrReloadCacheWithAPIOpts{} var reply *string result := dspSrv.CacheSv1LoadCache(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || 
result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1LoadCacheNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.AttrReloadCacheWithAPIOpts{ Tenant: "tenant", } var reply *string result := dspSrv.CacheSv1LoadCache(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1ReplicateRemoveError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.ArgCacheReplicateRemove{} var reply *string result := dspSrv.CacheSv1ReplicateRemove(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1ReplicateRemoveNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.ArgCacheReplicateRemove{ Tenant: "tenant", } var reply *string result := dspSrv.CacheSv1ReplicateRemove(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1ReplicateSetError(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) cgrCfg.DispatcherSCfg().AttributeSConns = []string{"test"} CGREvent := &utils.ArgCacheReplicateSet{} var reply *string result := dspSrv.CacheSv1ReplicateSet(CGREvent, reply) expected := "MANDATORY_IE_MISSING: [ApiKey]" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } } func TestDspCacheSv1ReplicateSetNil(t *testing.T) { cgrCfg := config.NewDefaultCGRConfig() dspSrv := NewDispatcherService(nil, cgrCfg, nil, nil) CGREvent := &utils.ArgCacheReplicateSet{ Tenant: "tenant", } var reply *string result := dspSrv.CacheSv1ReplicateSet(CGREvent, reply) expected := "DISPATCHER_ERROR:NO_DATABASE_CONNECTION" if result == nil || result.Error() != expected { t.Errorf("\nExpected <%+v>, \nReceived <%+v>", expected, result) } }<|fim▁end|>
<|file_name|>add_test.go<|end_file_name|><|fim▁begin|>package coreunix import ( "bytes" "context" "io" "io/ioutil" "math/rand" "os" "testing" "time" "github.com/ipfs/go-ipfs/blocks/blockstore" "github.com/ipfs/go-ipfs/blockservice" "github.com/ipfs/go-ipfs/commands/files" "github.com/ipfs/go-ipfs/core" dag "github.com/ipfs/go-ipfs/merkledag" "github.com/ipfs/go-ipfs/pin/gc" "github.com/ipfs/go-ipfs/repo" "github.com/ipfs/go-ipfs/repo/config" ds2 "github.com/ipfs/go-ipfs/thirdparty/datastore2" pi "github.com/ipfs/go-ipfs/thirdparty/posinfo" "gx/ipfs/QmVA4mafxbfH5aEvNz8fyoxC6J1xhAtw88B4GerPznSZBg/go-block-format" cid "gx/ipfs/QmTprEaAA2A9bst5XH7exuyi5KzNMK3SEDNN8rBDnKWcUS/go-cid" ) func TestAddRecursive(t *testing.T) { r := &repo.Mock{ C: config.Config{ Identity: config.Identity{ PeerID: "Qmfoo", // required by offline node }, }, D: ds2.ThreadSafeCloserMapDatastore(), } node, err := core.NewNode(context.Background(), &core.BuildCfg{Repo: r}) if err != nil { t.Fatal(err) } if k, err := AddR(node, "test_data"); err != nil { t.Fatal(err) } else if k != "QmWCCga8AbTyfAQ7pTnGT6JgmRMAB3Qp8ZmTEFi5q5o8jC" { t.Fatal("keys do not match: ", k) } } func TestAddGCLive(t *testing.T) { r := &repo.Mock{ C: config.Config{ Identity: config.Identity{ PeerID: "Qmfoo", // required by offline node }, }, D: ds2.ThreadSafeCloserMapDatastore(), } node, err := core.NewNode(context.Background(), &core.BuildCfg{Repo: r}) if err != nil { t.Fatal(err) } out := make(chan interface{}) adder, err := NewAdder(context.Background(), node.Pinning, node.Blockstore, node.DAG) if err != nil { t.Fatal(err) } adder.Out = out dataa := ioutil.NopCloser(bytes.NewBufferString("testfileA")) rfa := files.NewReaderFile("a", "a", dataa, nil) // make two files with pipes so we can 'pause' the add for timing of the test piper, pipew := io.Pipe() hangfile := files.NewReaderFile("b", "b", piper, nil) datad := ioutil.NopCloser(bytes.NewBufferString("testfileD")) rfd := files.NewReaderFile("d", "d", datad, nil) slf := files.NewSliceFile("files", "files", []files.File{rfa, hangfile, rfd}) addDone := make(chan struct{}) go func() { defer close(addDone) defer close(out) err := adder.AddFile(slf) if err != nil { t.Fatal(err) } }() addedHashes := make(map[string]struct{}) select { case o := <-out: addedHashes[o.(*AddedObject).Hash] = struct{}{} case <-addDone: t.Fatal("add shouldnt complete yet") } var gcout <-chan gc.Result gcstarted := make(chan struct{}) go func() { defer close(gcstarted) gcout = gc.GC(context.Background(), node.Blockstore, node.DAG, node.Pinning, nil) }() // gc shouldnt start until we let the add finish its current file. 
pipew.Write([]byte("some data for file b")) select { case <-gcstarted: t.Fatal("gc shouldnt have started yet") default: } time.Sleep(time.Millisecond * 100) // make sure gc gets to requesting lock // finish write and unblock gc pipew.Close() // receive next object from adder o := <-out addedHashes[o.(*AddedObject).Hash] = struct{}{} <-gcstarted for r := range gcout { if r.Error != nil { t.Fatal(err) } if _, ok := addedHashes[r.KeyRemoved.String()]; ok { t.Fatal("gc'ed a hash we just added") } } var last *cid.Cid for a := range out { // wait for it to finish c, err := cid.Decode(a.(*AddedObject).Hash) if err != nil { t.Fatal(err) } last = c } ctx, cancel := context.WithTimeout(context.Background(), time.Second*5) defer cancel() set := cid.NewSet() err = dag.EnumerateChildren(ctx, node.DAG.GetLinks, last, set.Visit) if err != nil { t.Fatal(err) } } func testAddWPosInfo(t *testing.T, rawLeaves bool) { r := &repo.Mock{ C: config.Config{ Identity: config.Identity{ PeerID: "Qmfoo", // required by offline node }, }, D: ds2.ThreadSafeCloserMapDatastore(), } node, err := core.NewNode(context.Background(), &core.BuildCfg{Repo: r}) if err != nil { t.Fatal(err) } bs := &testBlockstore{GCBlockstore: node.Blockstore, expectedPath: "/tmp/foo.txt", t: t} bserv := blockservice.New(bs, node.Exchange) dserv := dag.NewDAGService(bserv) adder, err := NewAdder(context.Background(), node.Pinning, bs, dserv) if err != nil { t.Fatal(err) } adder.Out = make(chan interface{}) adder.Progress = true adder.RawLeaves = rawLeaves adder.NoCopy = true data := make([]byte, 5*1024*1024) rand.New(rand.NewSource(2)).Read(data) // Rand.Read never returns an error fileData := ioutil.NopCloser(bytes.NewBuffer(data)) fileInfo := dummyFileInfo{"foo.txt", int64(len(data)), time.Now()} file := files.NewReaderFile("foo.txt", "/tmp/foo.txt", fileData, &fileInfo) go func() { defer close(adder.Out) err = adder.AddFile(file) if err != nil { t.Fatal(err) } }() for range adder.Out { } exp := 0 nonOffZero := 0 if rawLeaves { exp = 1 nonOffZero = 19 } if bs.countAtOffsetZero != exp { t.Fatalf("expected %d blocks with an offset at zero (one root and one leafh), got %d", exp, bs.countAtOffsetZero) } if bs.countAtOffsetNonZero != nonOffZero { // note: the exact number will depend on the size and the sharding algo. 
used t.Fatalf("expected %d blocks with an offset > 0, got %d", nonOffZero, bs.countAtOffsetNonZero) } } func TestAddWPosInfo(t *testing.T) { testAddWPosInfo(t, false) } func TestAddWPosInfoAndRawLeafs(t *testing.T) { testAddWPosInfo(t, true) }<|fim▁hole|>type testBlockstore struct { blockstore.GCBlockstore expectedPath string t *testing.T countAtOffsetZero int countAtOffsetNonZero int } func (bs *testBlockstore) Put(block blocks.Block) error { bs.CheckForPosInfo(block) return bs.GCBlockstore.Put(block) } func (bs *testBlockstore) PutMany(blocks []blocks.Block) error { for _, blk := range blocks { bs.CheckForPosInfo(blk) } return bs.GCBlockstore.PutMany(blocks) } func (bs *testBlockstore) CheckForPosInfo(block blocks.Block) error { fsn, ok := block.(*pi.FilestoreNode) if ok { posInfo := fsn.PosInfo if posInfo.FullPath != bs.expectedPath { bs.t.Fatal("PosInfo does not have the expected path") } if posInfo.Offset == 0 { bs.countAtOffsetZero += 1 } else { bs.countAtOffsetNonZero += 1 } } return nil } type dummyFileInfo struct { name string size int64 modTime time.Time } func (fi *dummyFileInfo) Name() string { return fi.name } func (fi *dummyFileInfo) Size() int64 { return fi.size } func (fi *dummyFileInfo) Mode() os.FileMode { return 0 } func (fi *dummyFileInfo) ModTime() time.Time { return fi.modTime } func (fi *dummyFileInfo) IsDir() bool { return false } func (fi *dummyFileInfo) Sys() interface{} { return nil }<|fim▁end|>
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pigame.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"<|fim▁hole|>
        raise
    execute_from_command_line(sys.argv)<|fim▁end|>
)
<|file_name|>repro_mixin.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

import os

from pants.util.dirutil import safe_mkdir_for


class ReproMixin(object):
  """ Additional helper methods for use in Repro tests"""

  def add_file(self, root, path, content):
    """Add a file with specified contents

    :param str root: Root directory for path.
    :param str path: Path relative to root.
    :param str content: Content to write to file.
    """
    fullpath = os.path.join(root, path)
    safe_mkdir_for(fullpath)
    with open(fullpath, 'w') as outfile:
      outfile.write(content)<|fim▁hole|>
    """Assert a file at relpath doesn't exist

    :param str root: Root directory of path.
    :param str path: Path relative to tar.gz.
    :return: bool
    """
    fullpath = os.path.join(root, path)
    self.assertFalse(os.path.exists(fullpath))

  def assert_file(self, root, path, expected_content=None):
    """ Assert that a file exists with the content specified

    :param str root: Root directory of path.
    :param str path: Path relative to tar.gz.
    :param str expected_content: file contents.

    :return: bool
    """
    fullpath = os.path.join(root, path)
    self.assertTrue(os.path.isfile(fullpath))
    if expected_content:
      with open(fullpath, 'r') as infile:
        content = infile.read()
      self.assertEqual(expected_content, content)<|fim▁end|>
  def assert_not_exists(self, root, path):
<|file_name|>p_serial.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # Ver 18 - 15 November 2017 - import time import serial import string import sys import mysql.connector from mysql.connector import errorcode, pooling from db import * import datetime #from threading import Thread import multiprocessing as mp from multiprocessing import Queue from multiprocessing.managers import SyncManager from os import system, devnull from subprocess import call, STDOUT from threading import Thread from time import sleep #import queue ctrlStr = "*../" HOST = '' PORT0 = 5011 PORT1 = 5012 PORT2 = 5013 PORT3 = 5014 AUTHKEY = str("123456").encode("utf-8") def output(o, x): print(str(str(o) + " " + str(datetime.datetime.now().time())[:8]) + " "+ str(x)) sys.stdout.flush() # -- DB Connection --------------------------- try: db = mysql.connector.connect(**config) except mysql.connector.Error as err: if err.errno == errorcode.ER_ACCESS_DENIED_ERROR: output("DB", "Something is wrong with your user name or password") elif err.errno == errorcode.ER_BAD_DB_ERROR: output("DB", "Database does not exists") else: output(err) else: output("PYSERIAL","Start procedure") db.commit() # -- END DB Connection --------------------------- # -- Open Serial to the Coordinator--------------- serCoord = serial.Serial('/dev/ttymxc3', 115200, timeout=10) #serCoord = serial.Serial('COM5', 115200, timeout=5) serCoord.timeout = 10 serCoord.setDTR(False) time.sleep(1) # toss any data already received, see serCoord.flushInput() # -- End Open Serial to the Coordinator----------- #----------------------------- # Global Variable declaration #----------------------------- endSerialChars = b"\r\n" global readSerial global serialBuffer pnum = 5 #number of values to send for each sensor # coordinator commands INString = "IN" # to send Node data to the coordinator ISString = "IS" # to send Sensor data to the coordinator IXString = "IX" # to send Address data to the coordinator IAString = "IA" # to send Actuators to the coordinator IMString = "IM" # to send Methods to the coordinator CommExecutedTrue = b"CX1\r\n" CommExecutedFalse = b"CX0\r\n" CommExecutedTrueX = b"CX1" CommExecutedFalseX = b"CX0" CommNotExecuted = b"X" #----------------------------- # End Global Variable declaration #----------------------------- # Gpio pin manager class Gpio: def __init__(self): self.gpios = ["55", "57"] self.gpioval = [0, 0] self.gpiodir = [0, 0] self.current = 0 self.OUTPUT = 1 self.INPUT = 0 self.HIGH = 1 self.LOW = 0 for num in self.gpios: try: with open("/sys/class/gpio/export", "w") as create: create.write(num) with open("/sys/class/gpio/gpio" + self.gpios[current] + "/value", "r") as reads: self.gpioval[self.current] = reads.read() with open("/sys/class/gpio/gpio" + self.gpios[current] + "/direction", "r") as readdir: self.gpiodir[self.current] = (1 if "out" in readdir.read() else 0) self.current += 1 except: sleep(0.000001) def pinMode(self, pin=0, direction=0): try: gpio = self.gpios[int(pin)] if int(direction) != self.gpiodir[pin]:<|fim▁hole|> writer.write("in" if direction < 1 else "out") self.gpiodir[pin] = (0 if direction < 1 else 1) return True except ValueError: output("PYSERIAL","ERROR: pinMode, value inserted wasn't an int") return False except: output("PYSERIAL","ERROR: pinMode, error using pinMode") return False def digitalWrite(self, pin=2, value=0): try: gpio = self.gpios[int(pin)] if self.gpiodir[pin] != 1: with open("/sys/class/gpio/gpio" + gpio + "/direction", "w") as re: re.write("out") self.gpiodir[pin] = 1 if 
self.gpioval[pin] != int(value): with open("/sys/class/gpio/gpio" + gpio + "/value", "w") as writes: writes.write("0" if value < 1 else "1") self.gpioval[pin] = (0 if value < 1 else 1) return True except ValueError: output("PYSERIAL","ERROR: digitalWrite, value inserted wasn't an int") return False except: output("PYSERIAL","ERROR: digitalWrite, error running") return False def digitalRead(self, pin=2): try: gpio = self.gpios[int(pin)] if self.gpiodir[pin] != 0: with open("/sys/class/gpio/gpio" + gpio + "/direction", "w") as re: re.write("in") self.gpiodir[pin] = 0 with open("/sys/class/gpio/gpio" + gpio + "/value", "r") as reader: self.gpioval[pin] = int(reader.read().replace('\n', '')) return self.gpioval[pin] except ValueError: output("PYSERIAL","ERROR: digitalRead, value inserted wasn't an int") return -1 except: output("PYSERIAL","ERROR: digitalRead, error running") return -1 #-- function to extract integer from strings def parseint(string): return int(''.join([x for x in string if x.isdigit()])) def log(t, m): #curLog = db.cursor() sql = "insert into tblog (type,msg) VALUES (%s, %s)" #try: #curLog.execute(sql, (t,m)) #db.commit() #curLog.close() #except: #raise #curLog.close() def printTime(): now = datetime.datetime.now() print(now.strftime("%H %M %S %f")) def checkInit(): # check Init sql = "SELECT pvalue,pindex FROM tbparam WHERE ptype = 'I'" cur.execute(sql) for (pvalue,pindex) in cur: i = int("{}".format(pindex)) if i == 1: output ("PYSERIAL","Initialize Coordinator") sql = "UPDATE tbparam SET pvalue = 0 WHERE ptype = 'I'" cur.execute(sql) db.commit() cur.close initCoordinator() break sys.stdout.flush() # end check Init #-- Send Init data to the Coordinator --# def initCoordinator(): #printTime() output ("PYSERIAL","Initializing...") global pnum global INString global IXString global ISString global IAString global IMString cur = db.cursor() #--------------------------------------------------------------------------------------------------------# #----begin building string to send out-------------------------------------------------------------------# #--------------------------------------------------------------------------------------------------------# # set numbers of parameters to build the string to send to the coordinator # count the number of nodes sql = "select count(*) as CNT from vwnodes WHERE nodetype != 0" #exclude external node cur.execute(sql) for (CNT) in cur: nodeNum=parseint("{}".format(CNT)) INString = INString + str(nodeNum*pnum) sql = "select count(*) as CNT from vwnodes WHERE nodetype = 2" #xbee nodes cur.execute(sql) for (CNT) in cur: nodeNum=parseint("{}".format(CNT)) IXString = IXString + str(nodeNum) # retrieve node data and buid initialization strings sql = "select id, xbee_high_address, xbee_low_address, nodetype from vwnodes WHERE nodetype != 0 AND status = 1 order by id" cur.execute(sql) for (id, xbee_high_address, xbee_low_address, nodetype) in cur: INString = INString + "," + "{}".format(id) + "," + "{}".format(nodetype) + ",0,0,1" if int("{}".format(nodetype)) == 2: #xbee IXString = IXString + "," + "{}".format(id) + "," + "{}".format(xbee_high_address) + "," + "{}".format(xbee_low_address) #db.commit() # count the number of sensors sql = "select count(*) as CNT from vwsensors where tbNodeType_id != 0 and pin_number < 30" cur.execute(sql) for (CNT) in cur: sensorNum=parseint("{}".format(CNT)) ISString = ISString + str(sensorNum*pnum) db.commit() #//col 0=node 1=sensor 2=value 3=alarm 4=spare #retrieve sensor data and build initialization strings 
sql = "SELECT nodeid,tbnodetype_id,tbsensortype_id,pin_number FROM vwsensors where tbnodetype_id != 0 and pin_number < 30 and tbstatus_id = 1 order by nodeid,pin_number" cur.execute(sql) for (nodeid,tbnodetype_id,tbsensortype_id,pin_number) in cur: ISString = ISString + "," + "{}".format(nodeid) + "," + "{}".format(pin_number) + ",0,0,0" #db.commit() # count the number of actuators sql = "select count(*) as CNT from vwactuator" cur.execute(sql) for (CNT) in cur: actuatorNum=parseint("{}".format(CNT)) IAString = IAString + str(actuatorNum*pnum) db.commit() #//col 0=node 1=sensor 2=value 3=alarm 4=spare #retrieve actuator data and build initialization strings sql = "select tbnode_id,pinnumber from tbactuator order by tbnode_id,pinnumber" cur.execute(sql) for (tbnode_id,pinnumber) in cur: IAString = IAString + "," + "{}".format(tbnode_id) + "," + "{}".format(pinnumber) + ",0,0,0" # count the number of methods sql = "select count(*) as CNT from vwmethods" cur.execute(sql) for (CNT) in cur: methodNum=parseint("{}".format(CNT)) IMString = IMString + str(methodNum*pnum) db.commit() #//col 0=node 1=actuator 2=method 3=value 4=spare #retrieve method data and build initialization strings sql = "select tbnode_id,pinnumber,method from vwmethods order by tbnode_id,pinnumber,method" cur.execute(sql) for (tbnode_id,pinnumber,method) in cur: IMString = IMString + "," + "{}".format(tbnode_id) + "," + "{}".format(pinnumber) + "," + "{}".format(method) + ",0,0" db.commit() cur.close #--------------------------------------------------------------------------------------------------------# #----end building string to send out---------------------------------------------------------------------# #--------------------------------------------------------------------------------------------------------# #--------------------------------------------------------------------------------------------------------# #----begin Sending init string to the coordinator -------------------------------------------------------# #--------------------------------------------------------------------------------------------------------# output("PYSERIAL","Init sensors") ret = initSendStringsToCoordinator(ISString) if ret == 0: #if fails return 0 output("PYSERIAL","Init actuators") #output(IAString) ret = initSendStringsToCoordinator(IAString) if ret == 0: #if fails return 0 output("PYSERIAL","Init methods") ret = initSendStringsToCoordinator(IMString) if ret == 0: #if fails return 0 output("PYSERIAL","Init nodes") ret = initSendStringsToCoordinator(INString) if ret == 0: #if fails return 0 output("PYSERIAL","Init node addresses Xbee") ret = initSendStringsToCoordinator(IXString) if ret == 0: #if fails return 0 #--------------------------------------------------------------------------------------------------------# #----end Sending init string to the coordinator ---------------------------------------------------------# #--------------------------------------------------------------------------------------------------------# # if Ok cur.close output ("PYSERIAL","End Initializing") return 1 def isResponse(response): if "CX0" in str(response, 'utf-8'): return True elif "CX1" in str(response, 'utf-8'): return True else: return False def isResponseOK(response): print(response) res = False if "CX0" in str(response, 'utf-8'): print(1) res = False elif "CX1" in str(response, 'utf-8'): print(2) res = True else: print(3) res = False print("qqq:") print("xx:", str(response)) return res 
#--------------------------------------------------------------------------------------------------------# #---- get serial incoming data ---------------------------------------------------------------------# #--------------------------------------------------------------------------------------------------------# def getSerialData(qIN, qOUT, qResponse): output("PYSERIAL","init serial") serCoord.flushInput() readSerial = "" serCoord.timeout = 1 while True: gpio.digitalWrite(0,gpio.LOW) #write high value to pin serialBuffer = serCoord.inWaiting() if serialBuffer > 0: #data available on serial gpio.digitalWrite(0,gpio.HIGH) readSerial = serCoord.readline() readSerial.rstrip(endSerialChars) if isResponse(readSerial) == True: # while not qResponse.empty(): # qResponse.get() #qResponse.put(readSerial) #output("Response received") aa=1 else: qIN.put(readSerial) # print("Data received:", serialBuffer) #print("Q size:", qIn.qsize()) while not qOUT.empty(): #print("Q OUT size:", qOUT.qsize()) stg = qOUT.get() serCoord.write(bytes(stg, 'UTF-8')) output("PYSERIAL","String sent: " + str(stg)) #--------------------------------------------------------------------------------------------------------# #---- End AUTOreceiveDataFromCoordinator --------------------------------------------------------------------# #--------------------------------------------------------------------------------------------------------# def initSendStringsToCoordinator(stg): serCoord.flushInput() # output("PYSERIAL",stg) # send the node string attemptsCnt = 0 while serCoord.inWaiting() == 0 and attemptsCnt < 5: ret = serCoord.write(bytes(stg, 'UTF-8')) readSerial = serCoord.readline() if readSerial == CommExecutedTrue: return 1 time.sleep(0.2) break elif readSerial == CommExecutedFalse: # write error in log log("E", "Error "+stg) else: attemptsCnt = attemptsCnt + 1 #output("PYSERIAL",attemptsCnt) continue # write error in log log("E", "no serial available") return 0 def QueueServerClient(HOST, PORT, AUTHKEY): class QueueManager(SyncManager): pass QueueManager.register('get_queue') QueueManager.register('get_name') QueueManager.register('get_description') manager = QueueManager(address = (HOST, PORT), authkey = AUTHKEY) manager.connect() # This starts the connected client return manager #------- Main section ----------------------------# #------- Run once --------------------------------# log("I", "Initialize coordinator") gpio = Gpio() gpio.pinMode(0, gpio.OUTPUT) ret = 0 curInit = db.cursor() #truncate output tables curInit.callproc('init') curInit.close() # create three connected managers qmIn = QueueServerClient(HOST, PORT0, AUTHKEY) qmOut = QueueServerClient(HOST, PORT1, AUTHKEY) qmSql = QueueServerClient(HOST, PORT2, AUTHKEY) qmResp = QueueServerClient(HOST, PORT3, AUTHKEY) # Get the queue objects from the clients qIn = qmIn.get_queue() qOut = qmOut.get_queue() qSql = qmSql.get_queue() qResp = qmResp.get_queue() while ret == 0: INString = "IN" # to send Node data to the coordinator ISString = "IS" # to send Sensor data to the coordinator IXString = "IX" # to send Address data to the coordinator IAString = "IA" # to send Actuators data to the coordinator IMString = "IM" # to send Methods data to the coordinator ret = initCoordinator() ret = 1 #------- End run once -------------------------# log("I", "Start main loop") getSerialData(qIn, qOut, qResp)<|fim▁end|>
with open("/sys/class/gpio/gpio" + gpio + "/direction", "w") as writer:
<|file_name|>stability-attribute-sanity-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>
// More checks that stability attributes are used correctly

#![feature(staged_api)]

#![stable(feature = "stable_test_feature", since = "1.0.0")]

#[stable(feature = "a", feature = "b", since = "1.0.0")] //~ ERROR multiple 'feature' items
fn f1() { }

#[stable(feature = "a", sinse = "1.0.0")] //~ ERROR unknown meta item 'sinse'
fn f2() { }

#[unstable(feature = "a", issue = "no")] //~ ERROR incorrect 'issue'
fn f3() { }

fn main() { }<|fim▁end|>
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|file_name|>_mod0_1_1_0_0_2.py<|end_file_name|><|fim▁begin|>name0_1_1_0_0_2_0 = None
name0_1_1_0_0_2_1 = None<|fim▁hole|>name0_1_1_0_0_2_3 = None
name0_1_1_0_0_2_4 = None<|fim▁end|>
name0_1_1_0_0_2_2 = None
<|file_name|>bsd_pf.rs<|end_file_name|><|fim▁begin|>//! PacketFilter implementation for *BSD use std::{ ffi::CString, io::{self, Error, ErrorKind}, mem, net::SocketAddr, ptr, }; use log::trace; use once_cell::sync::Lazy; use socket2::{Protocol, SockAddr}; mod ffi { use cfg_if::cfg_if; use nix::ioctl_readwrite; #[repr(C)] #[derive(Copy, Clone)] pub struct pf_addr { pub pfa: pf_addr__bindgen_ty_1, } #[repr(C)] #[derive(Copy, Clone)] pub union pf_addr__bindgen_ty_1 { pub v4: libc::in_addr, pub v6: libc::in6_addr, pub addr8: [u8; 16usize], pub addr16: [u16; 8usize], pub addr32: [u32; 4usize], _bindgen_union_align: [u32; 4usize], } cfg_if! { if #[cfg(any(target_os = "macos", target_os = "ios"))] { #[repr(C)] #[derive(Copy, Clone)] pub union pf_state_xport { pub port: u16, pub call_id: u16, pub spi: u32, } // Apple's XNU customized structure // // https://github.com/opensource-apple/xnu/blob/master/bsd/net/pfvar.h #[repr(C)] #[derive(Copy, Clone)] pub struct pfioc_natlook { pub saddr: pf_addr, pub daddr: pf_addr, pub rsaddr: pf_addr, pub rdaddr: pf_addr, pub sxport: pf_state_xport, pub dxport: pf_state_xport, pub rsxport: pf_state_xport, pub rdxport: pf_state_xport, pub af: libc::sa_family_t, pub proto: u8, pub proto_variant: u8, pub direction: u8, } impl pfioc_natlook { pub unsafe fn set_sport(&mut self, port: u16) { self.sxport.port = port; } pub unsafe fn set_dport(&mut self, port: u16) { self.dxport.port = port; } pub unsafe fn rdport(&self) -> u16 { self.rdxport.port } } } else { // FreeBSD's definition, should be the same as all the other platforms // // https://github.com/freebsd/freebsd/blob/master/sys/net/pfvar.h #[repr(C)] #[derive(Copy, Clone)] pub struct pfioc_natlook { pub saddr: pf_addr, pub daddr: pf_addr, pub rsaddr: pf_addr, pub rdaddr: pf_addr, pub sport: u16, pub dport: u16, pub rsport: u16, pub rdport: u16, pub af: libc::sa_family_t, pub proto: u8, pub proto_variant: u8, pub direction: u8, } impl pfioc_natlook { pub fn set_sport(&mut self, port: u16) { self.sport = port; } pub fn set_dport(&mut self, port: u16) { self.dport = port; } pub fn rdport(&self) -> u16 { self.rdport } } } } // pub const PF_IN: libc::c_int = 1; pub const PF_OUT: libc::c_int = 2; ioctl_readwrite!(ioc_natlook, 'D', 23, pfioc_natlook); } pub struct PacketFilter { fd: libc::c_int, } impl PacketFilter { fn open() -> io::Result<PacketFilter> { unsafe { let dev_path = CString::new("/dev/pf").expect("CString::new"); // According to FreeBSD's doc // https://www.freebsd.org/cgi/man.cgi?query=pf&sektion=4&apropos=0&manpath=FreeBSD+12.1-RELEASE+and+Ports let fd = libc::open(dev_path.as_ptr(), libc::O_RDONLY); if fd < 0 { let err = Error::last_os_error(); return Err(err); } // Set CLOEXEC let ret = libc::fcntl(fd, libc::F_SETFD, libc::fcntl(fd, libc::F_GETFD) | libc::FD_CLOEXEC); if ret != 0 { let err = Error::last_os_error(); let _ = libc::close(fd); return Err(err); } Ok(PacketFilter { fd }) } } pub fn natlook(&self, bind_addr: &SocketAddr, peer_addr: &SocketAddr, proto: Protocol) -> io::Result<SocketAddr> { trace!("PF natlook peer: {}, bind: {}", peer_addr, bind_addr); unsafe { let mut pnl: ffi::pfioc_natlook = mem::zeroed(); match *bind_addr { SocketAddr::V4(ref v4) => { pnl.af = libc::AF_INET as libc::sa_family_t; let sockaddr = SockAddr::from(*v4); let sockaddr = sockaddr.as_ptr() as *const libc::sockaddr_in; let addr: *const libc::in_addr = &((*sockaddr).sin_addr) as *const _; let port: libc::in_port_t = (*sockaddr).sin_port; #[allow(clippy::size_of_in_element_count)] ptr::copy_nonoverlapping(addr, &mut 
pnl.daddr.pfa.v4, mem::size_of_val(&pnl.daddr.pfa.v4)); pnl.set_dport(port); } SocketAddr::V6(ref v6) => { pnl.af = libc::AF_INET6 as libc::sa_family_t;<|fim▁hole|> let sockaddr = SockAddr::from(*v6); let sockaddr = sockaddr.as_ptr() as *const libc::sockaddr_in6; let addr: *const libc::in6_addr = &((*sockaddr).sin6_addr) as *const _; let port: libc::in_port_t = (*sockaddr).sin6_port; #[allow(clippy::size_of_in_element_count)] ptr::copy_nonoverlapping(addr, &mut pnl.daddr.pfa.v6, mem::size_of_val(&pnl.daddr.pfa.v6)); pnl.set_dport(port); } } match *peer_addr { SocketAddr::V4(ref v4) => { if pnl.af != libc::AF_INET as libc::sa_family_t { return Err(Error::new(ErrorKind::InvalidInput, "client addr must be ipv4")); } let sockaddr = SockAddr::from(*v4); let sockaddr = sockaddr.as_ptr() as *const libc::sockaddr_in; let addr: *const libc::in_addr = &((*sockaddr).sin_addr) as *const _; let port: libc::in_port_t = (*sockaddr).sin_port; #[allow(clippy::size_of_in_element_count)] ptr::copy_nonoverlapping(addr, &mut pnl.saddr.pfa.v4, mem::size_of_val(&pnl.saddr.pfa.v4)); pnl.set_sport(port); } SocketAddr::V6(ref v6) => { if pnl.af != libc::AF_INET6 as libc::sa_family_t { return Err(Error::new(ErrorKind::InvalidInput, "client addr must be ipv6")); } let sockaddr = SockAddr::from(*v6); let sockaddr = sockaddr.as_ptr() as *const libc::sockaddr_in6; let addr: *const libc::in6_addr = &((*sockaddr).sin6_addr) as *const _; let port: libc::in_port_t = (*sockaddr).sin6_port; #[allow(clippy::size_of_in_element_count)] ptr::copy_nonoverlapping(addr, &mut pnl.saddr.pfa.v6, mem::size_of_val(&pnl.saddr.pfa.v6)); pnl.set_sport(port); } } pnl.proto = i32::from(proto) as u8; pnl.direction = ffi::PF_OUT as u8; if let Err(err) = ffi::ioc_natlook(self.fd, &mut pnl as *mut _) { return Err(Error::from_raw_os_error(err as i32)); } let (_, dst_addr) = SockAddr::init(|dst_addr, addr_len| { if pnl.af == libc::AF_INET as libc::sa_family_t { let dst_addr: &mut libc::sockaddr_in = &mut *(dst_addr as *mut _); dst_addr.sin_family = pnl.af; dst_addr.sin_port = pnl.rdport(); #[allow(clippy::size_of_in_element_count)] ptr::copy_nonoverlapping( &pnl.rdaddr.pfa.v4, &mut dst_addr.sin_addr, mem::size_of_val(&pnl.rdaddr.pfa.v4), ); *addr_len = mem::size_of_val(&pnl.rdaddr.pfa.v4) as libc::socklen_t; } else if pnl.af == libc::AF_INET6 as libc::sa_family_t { let dst_addr: &mut libc::sockaddr_in6 = &mut *(dst_addr as *mut _); dst_addr.sin6_family = pnl.af; dst_addr.sin6_port = pnl.rdport(); #[allow(clippy::size_of_in_element_count)] ptr::copy_nonoverlapping( &pnl.rdaddr.pfa.v6, &mut dst_addr.sin6_addr, mem::size_of_val(&pnl.rdaddr.pfa.v6), ); *addr_len = mem::size_of_val(&pnl.rdaddr.pfa.v6) as libc::socklen_t; } else { unreachable!("sockaddr should be either ipv4 or ipv6"); } Ok(()) })?; Ok(dst_addr.as_socket().expect("SocketAddr")) } } } impl Drop for PacketFilter { fn drop(&mut self) { unsafe { libc::close(self.fd); } } } pub static PF: Lazy<PacketFilter> = Lazy::new(|| match PacketFilter::open() { Ok(pf) => pf, Err(err) if err.kind() == ErrorKind::PermissionDenied => { panic!("open /dev/pf permission denied, consider restart with root user"); } Err(err) => { panic!("open /dev/pf {}", err); } });<|fim▁end|>
<|file_name|>workflow.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (c) 2013, Theo Crevon # Copyright (c) 2013, Greg Leclercq # # See the file LICENSE for copying permission. from boto.swf.exceptions import SWFResponseError from swf.constants import REGISTERED from swf.querysets.base import BaseQuerySet from swf.models import Domain from swf.models.workflow import (WorkflowType, WorkflowExecution, CHILD_POLICIES) from swf.utils import datetime_timestamp, past_day, get_subkey from swf.exceptions import (ResponseError, DoesNotExistError, InvalidKeywordArgumentError, AlreadyExistsError) class BaseWorkflowQuerySet(BaseQuerySet): """Base domain bounded workflow queryset objects Amazon workflows types and executions are always bounded to a specific domain: so any queryset which means to deal with workflows has to be built against a `domain` :param domain: domain the inheriting queryset belongs to :type domain: swf.model.domain.Domain """ # Amazon response section corresponding # to current queryset informations _infos = 'typeInfo' _infos_plural = 'typeInfos' def __init__(self, domain, *args, **kwargs): super(BaseWorkflowQuerySet, self).__init__(*args, **kwargs) Domain.check(domain) self.domain = domain @property def domain(self): if not hasattr(self, '_domain'): self._domain = None return self._domain @domain.setter def domain(self, value): # Avoiding circular import from swf.models.domain import Domain if not isinstance(value, Domain): err = "domain property has to be of"\ "swf.model.domain.Domain type, not %r"\ % type(value) raise TypeError(err) self._domain = value def _list(self, *args, **kwargs): raise NotImplementedError def _list_items(self, *args, **kwargs): response = {'nextPageToken': None} while 'nextPageToken' in response: response = self._list( *args, next_page_token=response['nextPageToken'], **kwargs ) for item in response[self._infos_plural]: yield item class WorkflowTypeQuerySet(BaseWorkflowQuerySet): # Explicit is better than implicit, keep zen _infos = 'typeInfo' _infos_plural = 'typeInfos' def to_WorkflowType(self, domain, workflow_info, **kwargs): # Not using get_subkey in order for it to explictly # raise when workflowType name doesn't exist for example return WorkflowType( domain, workflow_info['workflowType']['name'], workflow_info['workflowType']['version'], status=workflow_info['status'], **kwargs ) def get(self, name, version, *args, **kwargs): """Fetches the Workflow Type with `name` and `version` :param name: name of the workflow type :type name: String :param version: workflow type version :type version: String :returns: matched workflow type instance :rtype: swf.core.model.workflow.WorkflowType A typical Amazon response looks like: .. 
code-block:: json { "configuration": { "defaultExecutionStartToCloseTimeout": "300", "defaultTaskStartToCloseTimeout": "300", "defaultTaskList": { "name": "None" }, "defaultChildPolicy": "TERMINATE" }, "typeInfo": { "status": "REGISTERED", "creationDate": 1364492094.968, "workflowType": { "version": "1", "name": "testW" } } } """ try: response = self.connection.describe_workflow_type(self.domain.name, name, version) except SWFResponseError as e: if e.error_code == 'UnknownResourceFault': raise DoesNotExistError(e.body['message']) raise ResponseError(e.body['message']) wt_info = response[self._infos] wt_config = response['configuration'] task_list = kwargs.get('task_list') if task_list is None: task_list = get_subkey(wt_config, ['defaultTaskList', 'name']) child_policy = kwargs.get('child_policy') if child_policy is None: child_policy = wt_config.get('defaultChildPolicy') decision_task_timeout = kwargs.get('decision_task_timeout') if decision_task_timeout is None: decision_task_timeout = wt_config.get( 'defaultTaskStartToCloseTimeout') execution_timeout = kwargs.get('execution_timeout') if execution_timeout is None: execution_timeout = wt_config.get( 'defaultExecutionStartToCloseTimeout') decision_tasks_timeout = kwargs.get('decision_tasks_timeout') if decision_tasks_timeout is None: decision_tasks_timeout = wt_config.get( 'defaultTaskStartToCloseTimeout') return self.to_WorkflowType( self.domain, wt_info, task_list=task_list, child_policy=child_policy, execution_timeout=execution_timeout, decision_tasks_timeout=decision_tasks_timeout, ) def get_or_create(self, name, version, status=REGISTERED, creation_date=0.0, deprecation_date=0.0, task_list=None, child_policy=CHILD_POLICIES.TERMINATE, execution_timeout='300', decision_tasks_timeout='300', description=None, *args, **kwargs): """Fetches, or creates the ActivityType with ``name`` and ``version`` When fetching trying to fetch a matching workflow type, only name and version parameters are taken in account. Anyway, If you'd wanna make sure that in case the workflow type has to be created it is made with specific values, just provide it. 
:param name: name of the workflow type :type name: String :param version: workflow type version :type version: String :param status: workflow type status :type status: swf.core.ConnectedSWFObject.{REGISTERED, DEPRECATED} :param creation_date: creation date of the current WorkflowType :type creation_date: float (timestamp) :param deprecation_date: deprecation date of WorkflowType :type deprecation_date: float (timestamp) :param task_list: task list to use for scheduling decision tasks for executions of this workflow type :type task_list: String :param child_policy: policy to use for the child workflow executions when a workflow execution of this type is terminated :type child_policy: CHILD_POLICIES.{TERMINATE | REQUEST_CANCEL | ABANDON} :param execution_timeout: maximum duration for executions of this workflow type :type execution_timeout: String :param decision_tasks_timeout: maximum duration of decision tasks for this workflow type :type decision_tasks_timeout: String :param description: Textual description of the workflow type :type description: String :returns: Fetched or created WorkflowType model object :rtype: WorkflowType """ try: return self.get(name, version, task_list=task_list, child_policy=child_policy, execution_timeout=execution_timeout, decision_tasks_timeout=decision_tasks_timeout) except DoesNotExistError: try: return self.create( name, version, status=status, creation_date=creation_date, deprecation_date=deprecation_date, task_list=task_list, child_policy=child_policy, execution_timeout=execution_timeout, decision_tasks_timeout=decision_tasks_timeout, description=description, ) # race conditon could happen if two workflows trying to register the same type except AlreadyExistsError: return self.get(name, version, task_list=task_list, child_policy=child_policy, execution_timeout=execution_timeout, decision_tasks_timeout=decision_tasks_timeout) def _list(self, *args, **kwargs): return self.connection.list_workflow_types(*args, **kwargs) def filter(self, domain=None, registration_status=REGISTERED, name=None, *args, **kwargs): """Filters workflows based on the ``domain`` they belong to, their ``status``, and/or their ``name`` :param domain: domain the workflow type belongs to :type domain: swf.models.domain.Domain :param registration_status: workflow type registration status to match, Valid values are: * ``swf.constants.REGISTERED`` * ``swf.constants.DEPRECATED`` :type registration_status: string :param name: workflow type name to match :type name: string :returns: list of matched WorkflowType models objects :rtype: list """ # As WorkflowTypeQuery has to be built against a specific domain # name, domain filter is disposable, but not mandatory. domain = domain or self.domain return [self.to_WorkflowType(domain, wf) for wf in self._list_items(domain.name, registration_status, name=name)] def all(self, registration_status=REGISTERED, *args, **kwargs): """Retrieves every Workflow types :param registration_status: workflow type registration status to match, Valid values are: * ``swf.constants.REGISTERED`` * ``swf.constants.DEPRECATED`` :type registration_status: string A typical Amazon response looks like: .. 
code-block:: json { "typeInfos": [ { "status": "REGISTERED", "creationDate": 1364293450.67, "description": "", "workflowType": { "version": "1", "name": "Crawl" } }, { "status": "REGISTERED", "creationDate": 1364492094.968, "workflowType": { "version": "1", "name": "testW" } } ] } """ return self.filter(registration_status=registration_status) def create(self, name, version, status=REGISTERED, creation_date=0.0, deprecation_date=0.0, task_list=None, child_policy=CHILD_POLICIES.TERMINATE, execution_timeout='300', decision_tasks_timeout='300', description=None, *args, **kwargs): """Creates a new remote workflow type and returns the created WorkflowType model instance. :param name: name of the workflow type :type name: String :param version: workflow type version :type version: String :param status: workflow type status :type status: swf.core.ConnectedSWFObject.{REGISTERED, DEPRECATED} :param creation_date: creation date of the current WorkflowType :type creation_date: float (timestamp) :param deprecation_date: deprecation date of WorkflowType :type deprecation_date: float (timestamp) :param task_list: task list to use for scheduling decision tasks for executions of this workflow type :type task_list: String :param child_policy: policy to use for the child workflow executions when a workflow execution of this type is terminated :type child_policy: CHILD_POLICIES.{TERMINATE | REQUEST_CANCEL | ABANDON} :param execution_timeout: maximum duration for executions of this workflow type :type execution_timeout: String :param decision_tasks_timeout: maximum duration of decision tasks for this workflow type :type decision_tasks_timeout: String :param description: Textual description of the workflow type :type description: String """ workflow_type = WorkflowType( self.domain, name, version, status=status, creation_date=creation_date, deprecation_date=deprecation_date, task_list=task_list, child_policy=child_policy, execution_timeout=execution_timeout, decision_tasks_timeout=decision_tasks_timeout, description=description ) workflow_type.save() return workflow_type class WorkflowExecutionQuerySet(BaseWorkflowQuerySet): """Fetches Workflow executions""" _infos = 'executionInfo' _infos_plural = 'executionInfos' def _is_valid_status_param(self, status, param): statuses = { WorkflowExecution.STATUS_OPEN: set([ 'oldest_date', 'latest_date'], ), WorkflowExecution.STATUS_CLOSED: set([ 'start_latest_date', 'start_oldest_date', 'close_latest_date', 'close_oldest_date', 'close_status' ]), } return param in statuses.get(status, set()) def _validate_status_parameters(self, status, params): return [param for param in params if not self._is_valid_status_param(status, param)] def list_workflow_executions(self, status, *args, **kwargs): statuses = { WorkflowExecution.STATUS_OPEN: 'open', WorkflowExecution.STATUS_CLOSED: 'closed', } # boto.swf.list_closed_workflow_executions awaits a `start_oldest_date` # MANDATORY kwarg, when boto.swf.list_open_workflow_executions awaits a # `oldest_date` mandatory arg. 
if status == WorkflowExecution.STATUS_OPEN: kwargs['oldest_date'] = kwargs.pop('start_oldest_date') try: method = 'list_{}_workflow_executions'.format(statuses[status]) return getattr(self.connection, method)(*args, **kwargs) except KeyError: raise ValueError("Unknown status provided: %s" % status) def get_workflow_type(self, execution_info): workflow_type = execution_info['workflowType'] workflow_type_qs = WorkflowTypeQuerySet(self.domain) return workflow_type_qs.get( workflow_type['name'], workflow_type['version'], ) def to_WorkflowExecution(self, domain, execution_info, **kwargs): workflow_type = WorkflowType( self.domain, execution_info['workflowType']['name'], execution_info['workflowType']['version'] ) return WorkflowExecution( domain, get_subkey(execution_info, ['execution', 'workflowId']), # workflow_id run_id=get_subkey(execution_info, ['execution', 'runId']), workflow_type=workflow_type, status=execution_info.get('executionStatus'), close_status=execution_info.get('closeStatus'), tag_list=execution_info.get('tagList'), start_timestamp=execution_info.get('startTimestamp'), close_timestamp=execution_info.get('closeTimestamp'), cancel_requested=execution_info.get('cancelRequested'), parent=execution_info.get('parent'), **kwargs ) def get(self, workflow_id, run_id, *args, **kwargs): """ """ try: response = self.connection.describe_workflow_execution( self.domain.name, run_id, workflow_id) except SWFResponseError as e: if e.error_code == 'UnknownResourceFault': raise DoesNotExistError(e.body['message']) raise ResponseError(e.body['message']) execution_info = response[self._infos] execution_config = response['executionConfiguration'] return self.to_WorkflowExecution( self.domain, execution_info, task_list=get_subkey(execution_config, ['taskList', 'name']), child_policy=execution_config.get('childPolicy'), execution_timeout=execution_config.get('executionStartToCloseTimeout'), decision_tasks_timeout=execution_config.get('taskStartToCloseTimeout'), latest_activity_task_timestamp=response.get('latestActivityTaskTimestamp'), latest_execution_context=response.get('latestExecutionContext'), open_counts=response['openCounts'], ) <|fim▁hole|> status=WorkflowExecution.STATUS_OPEN, tag=None, workflow_id=None, workflow_type_name=None, workflow_type_version=None, *args, **kwargs): """Filters workflow executions based on kwargs provided criteras :param status: workflow executions with provided status will be kept. 
Valid values are: * ``swf.models.WorkflowExecution.STATUS_OPEN`` * ``swf.models.WorkflowExecution.STATUS_CLOSED`` :type status: string :param tag: workflow executions containing the tag will be kept :type tag: String :param workflow_id: workflow executions attached to the id will be kept :type workflow_id: String :param workflow_type_name: workflow executions attached to the workflow type with provided name will be kept :type workflow_type_name: String :param workflow_type_version: workflow executions attached to the workflow type of the provided version will be kept :type workflow_type_version: String **Be aware that** querying over status allows the usage of statuses specific kwargs * STATUS_OPEN :param start_latest_date: latest start or close date and time to return (in days) :type start_latest_date: int * STATUS_CLOSED :param start_latest_date: workflow executions that meet the start time criteria of the filter are kept (in days) :type start_latest_date: int :param start_oldest_date: workflow executions that meet the start time criteria of the filter are kept (in days) :type start_oldest_date: int :param close_latest_date: workflow executions that meet the close time criteria of the filter are kept (in days) :type close_latest_date: int :param close_oldest_date: workflow executions that meet the close time criteria of the filter are kept (in days) :type close_oldest_date: int :param close_status: must match the close status of an execution for it to meet the criteria of this filter. Valid values are: * ``CLOSE_STATUS_COMPLETED`` * ``CLOSE_STATUS_FAILED`` * ``CLOSE_STATUS_CANCELED`` * ``CLOSE_STATUS_TERMINATED`` * ``CLOSE_STATUS_CONTINUED_AS_NEW`` * ``CLOSE_TIMED_OUT`` :type close_status: string :returns: workflow executions objects list :rtype: list """ # As WorkflowTypeQuery has to be built against a specific domain # name, domain filter is disposable, but not mandatory. invalid_kwargs = self._validate_status_parameters(status, kwargs) if invalid_kwargs: err_msg = 'Invalid keyword arguments supplied: {}'.format( ', '.join(invalid_kwargs)) raise InvalidKeywordArgumentError(err_msg) if status == WorkflowExecution.STATUS_OPEN: oldest_date = kwargs.pop('oldest_date', 30) else: # The SWF docs on ListClosedWorkflowExecutions state that: # # "startTimeFilter and closeTimeFilter are mutually exclusive" # # so we must figure out if we have to add a default value for # start_oldest_date or not. if "close_latest_date" in kwargs or "close_oldest_date" in kwargs: default_oldest_date = None else: default_oldest_date = 30 oldest_date = kwargs.pop('start_oldest_date', default_oldest_date) # Compute a timestamp from the delta in days we got from params # If oldest_date is blank at this point, it's because we didn't want # it, so let's leave it blank and assume the user provided an other # time filter. 
if oldest_date: start_oldest_date = int(datetime_timestamp(past_day(oldest_date))) else: start_oldest_date = None return [self.to_WorkflowExecution(self.domain, wfe) for wfe in self._list_items( *args, domain=self.domain.name, status=status, workflow_id=workflow_id, workflow_name=workflow_type_name, workflow_version=workflow_type_version, start_oldest_date=start_oldest_date, tag=tag, **kwargs )] def _list(self, *args, **kwargs): return self.list_workflow_executions(*args, **kwargs) def all(self, status=WorkflowExecution.STATUS_OPEN, start_oldest_date=30, *args, **kwargs): """Fetch every workflow executions during the last `start_oldest_date` days, with `status` :param status: Workflow executions status filter :type status: swf.models.WorkflowExecution.{STATUS_OPEN, STATUS_CLOSED} :param start_oldest_date: Specifies the oldest start/close date to return. :type start_oldest_date: integer (days) :returns: workflow executions objects list :rtype: list A typical amazon response looks like: .. code-block:: json { "executionInfos": [ { "cancelRequested": "boolean", "closeStatus": "string", "closeTimestamp": "number", "execution": { "runId": "string", "workflowId": "string" }, "executionStatus": "string", "parent": { "runId": "string", "workflowId": "string" }, "startTimestamp": "number", "tagList": [ "string" ], "workflowType": { "name": "string", "version": "string" } } ], "nextPageToken": "string" } """ start_oldest_date = datetime_timestamp(past_day(start_oldest_date)) return [self.to_WorkflowExecution(self.domain, wfe) for wfe in self._list_items( status, self.domain.name, start_oldest_date=int(start_oldest_date))]<|fim▁end|>
def filter(self,
<|file_name|>OnEventAdapter.java<|end_file_name|><|fim▁begin|>package com.mithos.bfg.loop; import java.awt.event.KeyEvent; import java.awt.event.MouseEvent; import java.awt.event.MouseWheelEvent; /** * This class is an adapter for {@link OnEvent}. All methods do * nothing and return true, so you need only implement the methods * that you need to. * @author James McMahon * */ public class OnEventAdapter implements OnEvent { @Override<|fim▁hole|> } @Override public boolean keyReleased(KeyEvent e) { return true; } @Override public boolean mousePressed(MouseEvent e) { return true; } @Override public boolean mouseMoved(MouseEvent e) { return true; } @Override public boolean mouseReleased(MouseEvent e) { return true; } @Override public boolean mouseWheel(MouseWheelEvent e) { return true; } }<|fim▁end|>
public boolean keyPressed(KeyEvent e) { return true;
<|file_name|>0002_auto_20170703_1345.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11.2 on 2017-07-03 13:45 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('frontend', '0001_initial'), ] operations = [ migrations.AlterField(<|fim▁hole|> field=models.DateTimeField(auto_now=True), ), ]<|fim▁end|>
model_name='frontenddeployment', name='deployed_at',
<|file_name|>db_create.py<|end_file_name|><|fim▁begin|>from migrate.versioning import api from config import SQLALCHEMY_DATABASE_URI<|fim▁hole|>from app import db import os.path db.create_all() if not os.path.exists(SQLALCHEMY_MIGRATE_REPO): api.create(SQLALCHEMY_MIGRATE_REPO, 'database_repository') api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO) else: api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, api.version(SQLALCHEMY_MIGRATE_REPO))<|fim▁end|>
from config import SQLALCHEMY_MIGRATE_REPO
<|file_name|>main.py<|end_file_name|><|fim▁begin|># My files from handlers import MainPage from handlers import WelcomePage from handlers import SignUpPage from handlers import SignIn from handlers import SignOut from handlers import NewPost from handlers import EditPost from handlers import DeletePost from handlers import SinglePost from handlers import LikePost from handlers import DislikePost from handlers import EditComment from handlers import DeleteComment <|fim▁hole|>import webapp2 app = webapp2.WSGIApplication([ ('/', MainPage), ('/signup', SignUpPage), ('/welcome', WelcomePage), ('/post/([0-9]+)', SinglePost), ('/new-post', NewPost), ('/edit-post/([0-9]+)', EditPost), ('/delete-post', DeletePost), ('/like-post', LikePost), ('/dislike-post', DislikePost), ('/edit-comment', EditComment), ('/delete-comment', DeleteComment), ('/login', SignIn), ('/logout', SignOut) ], debug=True)<|fim▁end|>
<|file_name|>test_variables.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- """Test class for Smart/Puppet Variables :Requirement: Smart_Variables :CaseAutomation: Automated :CaseLevel: Acceptance :CaseComponent: API :TestType: Functional :CaseImportance: High :Upstream: No """ import json from random import choice, uniform import yaml from fauxfactory import gen_integer, gen_string from nailgun import entities from requests import HTTPError from robottelo.api.utils import publish_puppet_module from robottelo.constants import CUSTOM_PUPPET_REPO from robottelo.datafactory import ( filtered_datapoint, generate_strings_list, invalid_values_list, valid_data_list, ) from robottelo.decorators import ( run_only_on, skip_if_bug_open, stubbed, tier1, tier2, ) from robottelo.test import APITestCase @filtered_datapoint def valid_sc_variable_data(): """Returns a list of valid smart class variable types and values""" return [ { u'sc_type': 'string', u'value': choice(generate_strings_list()), }, { u'sc_type': 'boolean', u'value': choice([True, False]), }, { u'sc_type': 'integer', u'value': gen_integer(), }, { u'sc_type': 'real', u'value': uniform(-1000, 1000), }, { u'sc_type': 'array', u'value': u'["{0}","{1}","{2}"]'.format( gen_string('alpha'), gen_string('numeric').lstrip('0'), gen_string('html'), ), }, { u'sc_type': 'hash', u'value': '{{ "{0}": "{1}" }}'.format( gen_string('alpha'), gen_string('alpha')), }, { u'sc_type': 'yaml', u'value': '--- {0}=>{1} ...'.format( gen_string('alpha'), gen_string('alpha')), }, { u'sc_type': 'json', u'value': u'{{"{0}":"{1}","{2}":"{3}"}}'.format( gen_string('alpha'), gen_string('numeric').lstrip('0'), gen_string('alpha'), gen_string('alphanumeric') ), }, ] @filtered_datapoint def invalid_sc_variable_data(): """Returns a list of invalid smart class variable type and values""" return [ { u'sc_type': 'boolean', u'value': gen_string('alphanumeric'), }, { u'sc_type': 'integer', u'value': gen_string('utf8'), }, { u'sc_type': 'real', u'value': gen_string('alphanumeric'), }, { u'sc_type': 'array', u'value': gen_string('alpha'), }, { u'sc_type': 'hash', u'value': gen_string('alpha'), }, { u'sc_type': 'yaml', u'value': '{{{0}:{1}}}'.format( gen_string('alpha'), gen_string('alpha')), }, { u'sc_type': 'json', u'value': u'{{{0}:{1},{2}:{3}}}'.format( gen_string('alpha'), gen_string('numeric').lstrip('0'), gen_string('alpha'), gen_string('alphanumeric') ), } ] class SmartVariablesTestCase(APITestCase): """Implements Smart Variables tests in API""" @classmethod def setUpClass(cls): """Import some parametrized puppet classes. This is required to make sure that we have data to be able to perform interactions with smart class variables. 
""" super(SmartVariablesTestCase, cls).setUpClass() cls.puppet_modules = [ {'author': 'robottelo', 'name': 'api_test_variables'}, ] cls.org = entities.Organization().create() cv = publish_puppet_module( cls.puppet_modules, CUSTOM_PUPPET_REPO, cls.org) cls.env = entities.Environment().search( query={'search': u'content_view="{0}"'.format(cv.name)} )[0] # Find imported puppet class cls.puppet_class = entities.PuppetClass().search(query={ 'search': u'name = "{0}" and environment = "{1}"'.format( cls.puppet_modules[0]['name'], cls.env.name) })[0] # And all its subclasses cls.puppet_subclasses = entities.PuppetClass().search(query={ 'search': u'name ~ "{0}::" and environment = "{1}"'.format( cls.puppet_modules[0]['name'], cls.env.name) }) # TearDown brakes parallel tests run as every test depends on the same # puppet class that will be removed during TearDown. # Uncomment for developing or debugging and do not forget to import # `robottelo.api.utils.delete_puppet_class`. # # @classmethod # def tearDownClass(cls): # """Removes puppet class.""" # super(SmartVariablesTestCase, cls).tearDownClass() # delete_puppet_class(cls.puppet_class.name) @run_only_on('sat') @tier1 def test_positive_create(self): """Create a Smart Variable with valid name :id: 4cd20cca-d419-43f5-9734-e9ae1caae4cb :steps: Create a smart Variable with Valid name and valid default value :expectedresults: The smart Variable is created successfully :CaseImportance: Critical """ for name in valid_data_list(): with self.subTest(name): smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, variable=name, ).create() self.assertEqual(smart_variable.variable, name) @run_only_on('sat') @tier1 def test_negative_create(self): """Create a Smart Variable with invalid name :id: d92f8bdd-93de-49ba-85a3-685aac9eda0a :steps: Create a smart Variable with invalid name and valid default value :expectedresults: The smart Variable is not created :CaseImportance: Critical """ for name in invalid_values_list(): with self.subTest(name), self.assertRaises(HTTPError): entities.SmartVariable( puppetclass=self.puppet_class, variable=name, ).create() @run_only_on('sat') @tier1 def test_positive_delete_smart_variable_by_id(self): """Delete a Smart Variable by id :id: 6d8354db-a028-4ae0-bcb6-87aa1cb9ec5d :steps: Delete a smart Variable by id :expectedresults: The smart Variable is deleted successfully :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class ).create() smart_variable.delete() with self.assertRaises(HTTPError) as context: smart_variable.read() self.assertRegexpMatches( context.exception.response.text, "Smart variable not found by id" ) @run_only_on('sat') @skip_if_bug_open('bugzilla', 1375857) @tier1 def test_positive_update_variable_puppet_class(self): """Update Smart Variable's puppet class. :id: 2312cb28-c3b0-4fbc-84cf-b66f0c0c64f0 :steps: 1. Create a smart variable with valid name. 2. Update the puppet class associated to the smart variable created in step1. :expectedresults: The variable is updated with new puppet class. 
:CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, ).create() self.assertEqual(smart_variable.puppetclass.id, self.puppet_class.id) new_puppet = entities.PuppetClass().search(query={ 'search': 'name="{0}"'.format(choice(self.puppet_subclasses).name) })[0] smart_variable.puppetclass = new_puppet updated_sv = smart_variable.update(['puppetclass']) self.assertEqual(updated_sv.puppetclass.id, new_puppet.id) @run_only_on('sat') @tier1 def test_positive_update_name(self): """Update Smart Variable's name :id: b8214eaa-e276-4fc4-8381-fb0386cda6a5 :steps: 1. Create a smart variable with valid name. 2. Update smart variable name created in step1. :expectedresults: The variable is updated with new name. :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, ).create() for new_name in valid_data_list(): with self.subTest(new_name): smart_variable.variable = new_name smart_variable = smart_variable.update(['variable']) self.assertEqual(smart_variable.variable, new_name) @run_only_on('sat') @tier1 def test_negative_duplicate_name_variable(self): """Create Smart Variable with an existing name. :id: c49ad14d-913f-4adc-8ebf-88493556c027 :steps: 1. Create a smart Variable with Valid name and default value. 2. Attempt to create a variable with same name from same/other class. :expectedresults: The variable with same name are not allowed to create from any class. :CaseImportance: Critical """ name = gen_string('alpha') entities.SmartVariable( variable=name, puppetclass=self.puppet_class, ).create() with self.assertRaises(HTTPError) as context: entities.SmartVariable( variable=name, puppetclass=self.puppet_class, ).create() self.assertRegexpMatches( context.exception.response.text, "Key has already been taken" ) @run_only_on('sat') @tier2 def test_positive_list_variables_by_host_id(self): """List all the variables associated to Host by host id :id: 4fc1f249-5da7-493b-a1d3-4ce7b625ad96 :expectedresults: All variables listed for Host :CaseLevel: Integration """ entities.SmartVariable(puppetclass=self.puppet_class).create() host = entities.Host(organization=self.org).create() host.environment = self.env host.update(['environment']) host.add_puppetclass(data={'puppetclass_id': self.puppet_class.id}) self.assertGreater(len(host.list_smart_variables()['results']), 0) @run_only_on('sat') @tier2 def test_positive_list_variables_by_hostgroup_id(self): """List all the variables associated to HostGroup by hostgroup id :id: db6861cc-b390-45bc-8c7d-cf10f46aecb3 :expectedresults: All variables listed for HostGroup :CaseLevel: Integration """ entities.SmartVariable(puppetclass=self.puppet_class).create() hostgroup = entities.HostGroup().create() hostgroup.add_puppetclass( data={'puppetclass_id': self.puppet_class.id}) self.assertGreater(len(hostgroup.list_smart_variables()['results']), 0) @run_only_on('sat') @tier1 def test_positive_list_variables_by_puppetclass_id(self): """List all the variables associated to puppet class by puppet class id :id: cd743329-b354-4ddc-ada0-3ddd774e2701 :expectedresults: All variables listed for puppet class :CaseImportance: Critical """ self.assertGreater(len(self.puppet_class.list_smart_variables()), 0) @run_only_on('sat') @tier1 def test_positive_create_variable_type(self): """Create variable for variable types - Valid Value Types - string, boolean, integer, real, array, hash, yaml, json :id: 4c8b4134-33c1-4f7f-83f9-a751c49ae2da :steps: Create a variable with all valid key types and default 
values :expectedresults: Variable created with all given types successfully :CaseImportance: Critical """ for data in valid_sc_variable_data(): with self.subTest(data): smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, variable_type=data['sc_type'], default_value=data['value'], ).create() self.assertEqual(smart_variable.variable_type, data['sc_type']) if data['sc_type'] in ('json', 'hash', 'array'): self.assertEqual( smart_variable.default_value, json.loads(data['value']) ) elif data['sc_type'] == 'yaml': self.assertEqual( smart_variable.default_value, yaml.load(data['value'])) else: self.assertEqual( smart_variable.default_value, data['value']) @run_only_on('sat') @tier1 def test_negative_create_variable_type(self): """Negative variable Update for variable types - Invalid Value Types - string, boolean, integer, real, array, hash, yaml, json :id: 9709d67c-682f-4e6c-8b8b-f02f6c2d3b71 :steps: Create a variable with all valid key types and invalid default values :expectedresults: Variable is not created for invalid value :CaseImportance: Critical """ for data in invalid_sc_variable_data(): with self.subTest(data): with self.assertRaises(HTTPError) as context: entities.SmartVariable( puppetclass=self.puppet_class, variable_type=data['sc_type'], default_value=data['value'], ).create() self.assertRegexpMatches( context.exception.response.text, "Default value is invalid" ) @run_only_on('sat') @tier1 def test_positive_create_matcher_empty_value(self): """Create matcher with empty value with string type :id: a90b5bcd-f76c-4663-bf41-2f96e7e15c0f :steps: Create a matcher for variable with empty value and type string :expectedresults: Matcher is created with empty value :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, variable_type='string', override_value_order='is_virtual', ).create() entities.OverrideValue( smart_variable=smart_variable, match='is_virtual=true', value='', ).create() smart_variable = smart_variable.read() self.assertEqual( smart_variable.override_values[0]['match'], 'is_virtual=true') self.assertEqual( smart_variable.override_values[0]['value'], '') @run_only_on('sat') @tier1 def test_negative_create_matcher_empty_value(self): """Create matcher with empty value with type other than string :id: ad24999f-1bed-4abb-a01f-3cb485d67968 :steps: Create a matcher for variable with empty value and type any other than string :expectedresults: Matcher is not created for empty value :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=gen_integer(), variable_type='integer', override_value_order='is_virtual', ).create() with self.assertRaises(HTTPError) as context: entities.OverrideValue( smart_variable=smart_variable, match='is_virtual=true', value='', ).create() self.assertEqual(len(smart_variable.read().override_values), 0) self.assertRegexpMatches( context.exception.response.text, "Validation failed: Value is invalid integer" ) @run_only_on('sat') @tier1 def test_negative_create_with_invalid_match_value(self): """Attempt to create matcher with invalid match value. 
:id: 625e3221-237d-4440-ab71-6d98cff67713 :steps: Create a matcher for variable with invalid match value :expectedresults: Matcher is not created :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, ).create() with self.assertRaises(HTTPError) as context: entities.OverrideValue( smart_variable=smart_variable, match='invalid_value', value=gen_string('alpha'), ).create() self.assertRegexpMatches( context.exception.response.text, "Validation failed: Match is invalid" ) @run_only_on('sat') @tier1 def test_negative_create_default_value_with_regex(self): """Create variable with non matching regex validator :id: 0c80bd58-26aa-4c2a-a087-ed3b88b226a7 :steps: 1. Create variable with default value that doesn't matches the regex of step 2 2. Validate this value with regexp validator type and rule :expectedresults: Variable is not created for non matching value with regex :CaseImportance: Critical """ value = gen_string('alpha') smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=value, ).create() smart_variable.default_value = gen_string('alpha') smart_variable.validator_type = 'regexp' smart_variable.validator_rule = '[0-9]' with self.assertRaises(HTTPError) as context: smart_variable.update([ 'default_value', 'validator_type', 'validator_rule' ]) self.assertRegexpMatches( context.exception.response.text, "Validation failed: Default value is invalid" ) self.assertEqual(smart_variable.read().default_value, value) @run_only_on('sat') @tier1 def test_positive_create_default_value_with_regex(self): """Create variable with matching regex validator :id: aa9803b9-9a45-4ad8-b502-e0e32fc4b7d8 :steps: 1. Create variable with default value that matches the regex of step 2 2. Validate this value with regex validator type and rule :expectedresults: Variable is created for matching value with regex :CaseImportance: Critical """ value = gen_string('numeric') smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=gen_string('alpha'), ).create() smart_variable.default_value = value smart_variable.validator_type = 'regexp' smart_variable.validator_rule = '[0-9]' smart_variable.update([ 'default_value', 'validator_type', 'validator_rule' ]) smart_variable = smart_variable.read() self.assertEqual(smart_variable.default_value, value) self.assertEqual(smart_variable.validator_type, 'regexp') self.assertEqual(smart_variable.validator_rule, '[0-9]') @run_only_on('sat') @tier1 def test_negative_create_matcher_value_with_regex(self): """Create matcher with non matching regexp validator :id: 8a0f9251-7992-4d1e-bace-7e32637bf56f :steps: 1. Create a matcher with value that doesn't matches the regex of step 2 2. 
Validate this value with regex validator type and rule :expectedresults: Matcher is not created for non matching value with regexp :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=gen_string('numeric'), validator_type='regexp', validator_rule='[0-9]', ).create() with self.assertRaises(HTTPError) as context: entities.OverrideValue( smart_variable=smart_variable, match='domain=example.com', value=gen_string('alpha'), ).create() self.assertRegexpMatches( context.exception.response.text, "Validation failed: Value is invalid" ) self.assertEqual(len(smart_variable.read().override_values), 0) @run_only_on('sat') @tier1 def test_positive_create_matcher_value_with_regex(self): """Create matcher with matching regex validator :id: 3ad09261-eb55-4758-b915-84006c9e527c :steps: 1. Create a matcher with value that matches the regex of step 2 2. Validate this value with regex validator type and rule :expectedresults: Matcher is created for matching value with regex :CaseImportance: Critical """ value = gen_string('numeric') smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=gen_string('numeric'), validator_type='regexp', validator_rule='[0-9]', ).create() entities.OverrideValue( smart_variable=smart_variable, match='domain=example.com', value=value, ).create() smart_variable = smart_variable.read() self.assertEqual(smart_variable.validator_type, 'regexp') self.assertEqual(smart_variable.validator_rule, '[0-9]') self.assertEqual( smart_variable.override_values[0]['match'], 'domain=example.com') self.assertEqual( smart_variable.override_values[0]['value'], value) @run_only_on('sat') @tier1 def test_negative_create_default_value_with_list(self): """Create variable with non matching list validator :id: cacb83a5-3e50-490b-b94f-a5d27f44ae12 :steps: 1. Create variable with default value that doesn't matches the list validator of step 2 2. Validate this value with list validator type and rule :expectedresults: Variable is not created for non matching value with list validator :CaseImportance: Critical """ with self.assertRaises(HTTPError) as context: entities.SmartVariable( puppetclass=self.puppet_class, default_value=gen_string('alphanumeric'), validator_type='list', validator_rule='5, test', ).create() self.assertRegexpMatches( context.exception.response.text, r"Default value \w+ is not one of" ) @run_only_on('sat') @tier1 def test_positive_create_default_value_with_list(self): """Create variable with matching list validator :id: 6bc2caa0-1300-4751-8239-34b96517465b :steps: 1. Create variable with default value that matches the list validator of step 2 2. 
Validate this value with list validator type and rule :expectedresults: Variable is created for matching value with list :CaseImportance: Critical """ # Generate list of values values_list = [ gen_string('alpha'), gen_string('alphanumeric'), gen_integer(min_value=100), choice(['true', 'false']), ] # Generate string from list for validator_rule values_list_str = ", ".join(str(x) for x in values_list) value = choice(values_list) smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=value, validator_type='list', validator_rule=values_list_str, ).create() self.assertEqual(smart_variable.default_value, value) self.assertEqual(smart_variable.validator_type, 'list') self.assertEqual(smart_variable.validator_rule, values_list_str) @run_only_on('sat') @tier1 def test_negative_create_matcher_value_with_list(self): """Create matcher with non matching list validator :id: 0aff0fdf-5a62-49dc-abe1-b727459d030a :steps: 1. Create a matcher with value that doesn't matches the list validator of step 2 2. Validate this value with list validator type and rule :expectedresults: Matcher is not created for non matching value with list validator :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value='example', validator_type='list', validator_rule='test, example, 30', ).create() with self.assertRaises(HTTPError) as context: entities.OverrideValue( smart_variable=smart_variable, match='domain=example.com', value='not_in_list', ).create() self.assertRegexpMatches( context.exception.response.text, r"Validation failed: Value \w+ is not one of" ) self.assertEqual(len(smart_variable.read().override_values), 0) @run_only_on('sat') @tier1 def test_positive_create_matcher_value_with_list(self): """Create matcher with matching list validator :id: f5eda535-6623-4130-bea0-97faf350a6a6 :steps: 1. Create a matcher with value that matches the list validator of step 2 2. Validate this value with list validator type and rule :expectedresults: Matcher is created for matching value with list validator :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value='example', validator_type='list', validator_rule='test, example, 30', ).create() entities.OverrideValue( smart_variable=smart_variable, match='domain=example.com', value=30, ).create() smart_variable = smart_variable.read() self.assertEqual(smart_variable.validator_type, 'list') self.assertEqual(smart_variable.validator_rule, 'test, example, 30') self.assertEqual( smart_variable.override_values[0]['match'], 'domain=example.com') self.assertEqual( smart_variable.override_values[0]['value'], 30) @run_only_on('sat') @skip_if_bug_open('bugzilla', 1375643) @tier1 def test_negative_create_matcher_value_with_default_type(self): """Create matcher with non matching type of default value :id: 790c63d7-4e8a-4187-8566-3d85d57f9a4f :steps: 1. Create variable with valid type and value 2. 
Create a matcher with value that doesn't matches the default type :expectedresults: Matcher is not created for non matching the type of default value :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=True, variable_type='boolean', ).create() with self.assertRaises(HTTPError) as context: entities.OverrideValue( smart_variable=smart_variable, match='domain=example.com', value=50, ).create() self.assertRegexpMatches( context.exception.response.text, "Validation failed: Value is invalid" ) self.assertEqual(smart_variable.read().default_value, True) @run_only_on('sat') @tier1 def test_positive_create_matcher_value_with_default_type(self): """Create matcher with matching type of default value :id: 99057f05-62cb-4230-b16c-d96ca6a5ae91 :steps: 1. Create variable with valid type and value 2. Create a matcher with value that matches the default value type :expectedresults: Matcher is created for matching the type of default value :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=True, variable_type='boolean', override_value_order='is_virtual', ).create() entities.OverrideValue( smart_variable=smart_variable, match='is_virtual=true', value=False, ).create() smart_variable = smart_variable.read() self.assertEqual( smart_variable.override_values[0]['match'], 'is_virtual=true') self.assertEqual( smart_variable.override_values[0]['value'], False) @run_only_on('sat') @tier1 def test_negative_create_matcher_non_existing_attribute(self): """Create matcher for non existing attribute :id: 23b16e7f-0626-467e-b53b-35e1634cc30d :steps: Create matcher for non existing attribute :expectedresults: Matcher is not created for non existing attribute :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, ).create() with self.assertRaises(HTTPError) as context: entities.OverrideValue( smart_variable=smart_variable, match='hostgroup=nonexistingHG', value=gen_string('alpha') ).create() self.assertRegexpMatches( context.exception.response.text, "Validation failed: Match hostgroup=nonexistingHG does not match " "an existing host group" ) self.assertEqual(len(smart_variable.read().override_values), 0) @run_only_on('sat') @tier1 def test_positive_create_matcher(self): """Create matcher for attribute in variable :id: f0b3d51a-cf9a-4b43-9567-eb12cd973299 :steps: Create a matcher with all valid values :expectedresults: The matcher has been created successfully :CaseImportance: Critical """ value = gen_string('alpha') smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, ).create() entities.OverrideValue( smart_variable=smart_variable, match='domain=example.com', value=value, ).create() smart_variable = smart_variable.read() self.assertEqual( smart_variable.override_values[0]['match'], 'domain=example.com') self.assertEqual( smart_variable.override_values[0]['value'], value) @run_only_on('sat') @stubbed() @tier1 def test_positive_update_variable_attribute_priority(self): """Variable value set on Attribute Priority for Host :id: 78474b5e-7a50-4de0-b22c-3413ac06d067 :bz: 1362372 :steps: 1. Create variable with some valid value and type 2. Set fqdn as top priority attribute 3. Create first matcher for fqdn with valid details 4. Create second matcher for some attribute with valid details Note - The FQDN/host should have this attribute 5. Check ENC output of associated host. 
:expectedresults: The ENC output shows variable value of fqdn matcher only :caseautomation: notautomated :CaseImportance: Critical """ @run_only_on('sat') @stubbed() @tier1 def test_negative_update_variable_attribute_priority(self): """Matcher Value set on Attribute Priority for Host - alternate priority :id: f6ef2193-5d63-43f1-8d91-e30984b2c0c5 :bz: 1362372 :steps: 1. Create variable with valid value and type 2. Set some attribute(other than fqdn) as top priority attribute Note - The fqdn/host should have this attribute 3. Create first matcher for fqdn with valid details 4. Create second matcher for attribute of step 3 with valid details 5. Check ENC output of associated host. :expectedresults: The ENC output shows variable value of step 4 matcher only :caseautomation: notautomated :CaseImportance: Critical """ @run_only_on('sat') @stubbed() @tier1 def test_positive_update_variable_merge_override(self): """Merge the values of all the associated matchers Note - This TC is only for array and hash key types :id: bb37995e-71f9-441c-b4d5-79e5b5ff3973 :bz: 1362372 :steps: 1. Create variable with valid value and type 2. Create first matcher for attribute fqdn with valid details 3. Create second matcher for other attribute with valid details. Note - The fqdn/host should have this attribute 4. Create more matchers for some more attributes if any Note - The fqdn/host should have this attributes 5. Set 'merge overrides' to True 6. Check ENC output of associated host :expectedresults: 1. The ENC output shows variable values merged from all the associated matchers 2. The variable doesn't show the default value of variable. 3. Duplicate values in any are displayed :caseautomation: notautomated :CaseImportance: Critical """ @run_only_on('sat') @stubbed() @tier1 def test_negative_update_variable_merge_override(self): """Merge the override values from non associated matchers Note - This TC is only for array and hash key types :id: afcb7ef4-38dd-484b-8a02-bc4e3d027204 :bz: 1362372 :steps: 1. Create variable with valid value and type 2. Create first matcher for attribute fqdn with valid details 3. Create second matcher for other attribute with valid details Note - The fqdn/host should not have this attribute 4. Create more matchers for some more attributes if any Note - The fqdn/host should not have this attributes 5. Set 'merge overrides' to True 6. Check ENC output of associated host :expectedresults: 1. The ENC output shows variable values only for fqdn 2. The variable doesn't have the values for attribute which are not associated to host 3. The variable doesn't have the default value of variable 4. Duplicate values if any are displayed :caseautomation: notautomated :CaseImportance: Critical """ @run_only_on('sat') @stubbed() @tier1 def test_positive_update_variable_merge_default(self): """Merge the values of all the associated matchers + default value Note - This TC is only for array and hash key types :id: 9607c52c-f4c7-468b-a741-d179de144646 :bz: 1362372 :steps: 1. Create variable with valid value and type 2. Create first matcher for attribute fqdn with valid details 3. Create second matcher for other attribute with valid details Note - The fqdn/host should have this attribute 4. Create more matchers for some more attributes if any Note - The fqdn/host should have this attributes 5. Set 'merge overrides' to True 6. Set 'merge default' to True 7. Check ENC output of associated host :expectedresults: 1. The ENC output shows the variable values merged from all the associated matchers 2. 
The variable values has the default value of variable 3. Duplicate values if any are displayed :caseautomation: notautomated :CaseImportance: Critical """ @run_only_on('sat') @stubbed() @tier1 def test_negative_update_variable_merge_default(self): """Empty default value is not shown in merged values Note - This TC is only for array and hash key types :id: 9033de15-f7e8-42be-b2be-c04c13aa039b :bz: 1362372 :steps: 1. Create variable with empty value and type 2. Create first matcher for attribute fqdn with valid details 3. Create second matcher for other attribute with valid details Note - The fqdn/host should have this attribute 4. Create more matchers for some more attributes if any Note - The fqdn/host should have this attributes 5. Set 'merge overrides' to True 6. Set 'merge default' to True 7. Check ENC output of associated host :expectedresults: 1. The ENC output shows variable values merged from all the associated matchers 2. The variable doesn't have the empty default value of variable 3. Duplicate values if any are displayed :caseautomation: notautomated :CaseImportance: Critical """ @run_only_on('sat') @stubbed() @tier1 def test_positive_update_variable_avoid_duplicate(self): """Merge the values of all the associated matchers, remove duplicates Note - This TC is only for array and hash key types :id: fcb2dfb9-64d6-4647-bbcc-3e5c900aca1b :bz: 1362372 :steps: 1. Create variable with valid value and type 2. Create first matcher for attribute fqdn with some value 3. Create second matcher for other attribute with same value as fqdn matcher. Note - The fqdn/host should have this attribute 4. Set 'merge overrides' to True 5. Set 'merge default' to True 6. Set 'avoid duplicate' to True 7. Check ENC output of associated host :expectedresults: 1. The ENC output shows the variable values merged from all the associated matchers 2. The variable shows the default value of variable 3. Duplicate values are removed / not displayed :caseautomation: notautomated :CaseImportance: Critical """ @run_only_on('sat') @stubbed() @tier1 def test_negative_update_variable_avoid_duplicate(self): """Duplicates are not removed as they were not really present Note - This TC is only for array and hash key types :id: 1f8a06de-0c53-424e-b2c9-b48a580d6298 :bz: 1362372 :steps: 1. Create variable with valid value and type 2. Create first matcher for attribute fqdn with some value 3. Create second matcher for other attribute with other value than fqdn matcher and default value. Note - The fqdn/host should have this attribute 4. Set 'merge overrides' to True 5. Set 'merge default' to True 6. Set 'avoid duplicates' to True 7. Check ENC output of associated host :expectedresults: 1. The ENC output shows the variable values merged from all matchers 2. The variable shows default value of variable 3. 
No value removed as duplicate value :caseautomation: notautomated :CaseImportance: Critical """ @run_only_on('sat') @tier1 def test_positive_enable_merge_overrides_and_default_flags(self): """Enable Merge Overrides, Merge Default flags for supported types :id: af2c16e1-9a78-4615-9bc3-34fadca6a179 :steps: Set variable type to array/hash :expectedresults: The Merge Overrides, Merge Default flags are enabled to set :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=[gen_integer()], variable_type='array', ).create() smart_variable.merge_overrides = True smart_variable.merge_default = True smart_variable.update(['merge_overrides', 'merge_default']) smart_variable.read() self.assertEqual(smart_variable.merge_overrides, True) self.assertEqual(smart_variable.merge_default, True) @run_only_on('sat') @tier1 def test_negative_enable_merge_overrides_default_flags(self): """Disable Merge Overrides, Merge Default flags for non supported types :id: f62a7e23-6fb4-469a-8589-4c987ff589ef :steps: Set variable type other than array/hash :expectedresults: The Merge Overrides, Merge Default flags are not enabled to set :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value='50', variable_type='string', ).create() with self.assertRaises(HTTPError) as context: smart_variable.merge_overrides = True smart_variable.update(['merge_overrides']) self.assertRegexpMatches( context.exception.response.text, "Validation failed: Merge overrides can only be set for " "array or hash" ) with self.assertRaises(HTTPError) as context: smart_variable.merge_default = True smart_variable.update(['merge_default']) self.assertRegexpMatches( context.exception.response.text, "Validation failed: Merge default can only be set when merge " "overrides is set" ) smart_variable = smart_variable.read() self.assertEqual(smart_variable.merge_overrides, False) self.assertEqual(smart_variable.merge_default, False) @run_only_on('sat') @tier1 def test_positive_enable_avoid_duplicates_flag(self): """Enable Avoid duplicates flag for supported type :id: 98fb1884-ad2b-45a0-b376-66bbc5ef6f72 :steps: 1. Set variable type to array 2. Set 'merge overrides' to True :expectedresults: The Avoid Duplicates is enabled to set to True :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=[gen_integer()], variable_type='array', ).create() smart_variable.merge_overrides = True smart_variable.avoid_duplicates = True smart_variable.update(['merge_overrides', 'avoid_duplicates']) self.assertEqual(smart_variable.merge_overrides, True) self.assertEqual(smart_variable.avoid_duplicates, True) @run_only_on('sat') @tier1 def test_negative_enable_avoid_duplicates_flag(self): """Disable Avoid duplicates flag for non supported types :id: c7a2f718-6346-4851-b5f1-ab36c2fa8c6a :steps: Set variable type other than array :expectedresults: 1. The Merge Overrides flag is only enabled to set for type hash other than array 2. 
The Avoid duplicates flag not enabled to set for any type than array :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, default_value=True, variable_type='boolean', ).create() with self.assertRaises(HTTPError) as context: smart_variable.merge_overrides = True smart_variable.update(['merge_overrides']) self.assertRegexpMatches( context.exception.response.text, "Validation failed: Merge overrides can only be set for " "array or hash" ) with self.assertRaises(HTTPError) as context: smart_variable.avoid_duplicates = True smart_variable.update(['avoid_duplicates']) self.assertRegexpMatches( context.exception.response.text, "Validation failed: Avoid duplicates can only be set for arrays " "that have merge_overrides set to true" ) smart_variable = smart_variable.read() self.assertEqual(smart_variable.merge_overrides, False) self.assertEqual(smart_variable.avoid_duplicates, False) @run_only_on('sat') @tier1 def test_positive_remove_matcher(self): """Removal of matcher from variable :id: 7a932a99-2bd9-43ee-bcda-2b01a389787c :steps: 1. Create the variable and create a matcher for some attribute 2. Remove the matcher created in step 1 :expectedresults: The matcher removed from variable :CaseImportance: Critical """ value = gen_string('alpha') smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, override_value_order='is_virtual', ).create() matcher = entities.OverrideValue( smart_variable=smart_variable, match='is_virtual=true', value=value, ).create() smart_variable = smart_variable.read() self.assertEqual( smart_variable.override_values[0]['match'], 'is_virtual=true') self.assertEqual( smart_variable.override_values[0]['value'], value) matcher.delete() self.assertEqual(len(smart_variable.read().override_values), 0) @run_only_on('sat') @tier2 def test_positive_impact_variable_delete_attribute(self): """Impact on variable after deleting associated attribute :id: d4faec04-be29-48e6-8585-10ff1c361a9e :steps: 1. Create a variable and matcher for some attribute 2. Delete the attribute 3. Recreate the attribute with same name as earlier :expectedresults: 1. The matcher for deleted attribute removed from variable 2. 
On recreating attribute, the matcher should not reappear in variable :CaseLevel: Integration """ hostgroup_name = gen_string('alpha') matcher_value = gen_string('alpha') # Create variable smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, ).create() # Create hostgroup and add puppet class to it hostgroup = entities.HostGroup( name=hostgroup_name, environment=self.env, ).create() hostgroup.add_puppetclass( data={'puppetclass_id': self.puppet_class.id}) # Create matcher entities.OverrideValue( smart_variable=smart_variable, match='hostgroup={0}'.format(hostgroup_name), value=matcher_value, ).create() smart_variable = smart_variable.read() self.assertEqual( smart_variable.override_values[0]['match'], 'hostgroup={0}'.format(hostgroup_name) ) self.assertEqual( smart_variable.override_values[0]['value'], matcher_value) # Delete hostgroup hostgroup.delete() self.assertEqual(len(smart_variable.read().override_values), 0) # Recreate hostgroup hostgroup = entities.HostGroup( name=hostgroup_name, environment=self.env, ).create() hostgroup.add_puppetclass( data={'puppetclass_id': self.puppet_class.id}) self.assertEqual(len(smart_variable.read().override_values), 0) @run_only_on('sat') @tier1 def test_positive_hide_variable_default_value(self): """Hide the default value of variable :id: 04bed7fa8-a5be-4fc0-8e9b-d68da00f8de0 :steps: 1. Create variable with valid type and value 2. Set 'Hidden Value' flag to true :expectedresults: The 'hidden value' flag is set :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, hidden_value=True, ).create() self.assertEqual(getattr(smart_variable, 'hidden_value?'), True) self.assertEqual(smart_variable.default_value, u'*****') @run_only_on('sat') @tier1 def test_positive_unhide_variable_default_value(self): """Unhide the default value of variable :id: e8b3ec03-1abb-48d8-9409-17178bb887cb :steps: 1. Create variable with valid type and value 2. Set 'Hidden Value' flag to True 3. After hiding, set the 'Hidden Value' flag to False :expectedresults: The 'hidden value' flag set to false :CaseImportance: Critical """ smart_variable = entities.SmartVariable( puppetclass=self.puppet_class, hidden_value=True, ).create() self.assertEqual(getattr(smart_variable, 'hidden_value?'), True) smart_variable.hidden_value = False smart_variable.update(['hidden_value']) smart_variable = smart_variable.read() self.assertEqual(getattr(smart_variable, 'hidden_value?'), False) @run_only_on('sat') @tier1 def test_positive_update_hidden_value_in_variable(self): """Update the hidden default value of variable :id: 21b5586e-9434-45ea-ae85-12e24c549412 :steps: 1. Create variable with valid type and value 2. Set 'Hidden Value' flag to true 3. Now in hidden state, update the default value :expectedresults: 1. The variable default value is updated 2. The 'hidden value' flag set to True :CaseImportance: Critical """ value = gen_string('alpha') smart_variable = entities.SmartVariable( puppetclass=self.puppet_class,<|fim▁hole|> self.assertEqual(smart_variable.default_value, u'*****') smart_variable.default_value = value smart_variable.update(['default_value']) smart_variable = smart_variable.read(params={'show_hidden': 'true'}) self.assertEqual(smart_variable.default_value, value) self.assertEqual(getattr(smart_variable, 'hidden_value?'), True)<|fim▁end|>
default_value=gen_string('alpha'), hidden_value=True, ).create() self.assertEqual(getattr(smart_variable, 'hidden_value?'), True)
<|file_name|>nested_macro_privacy.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(decl_macro)] macro n($foo:ident, $S:ident, $i:ident, $m:ident) { mod $foo { #[derive(Default)] pub struct $S { $i: u32 } pub macro $m($e:expr) { $e.$i } } } n!(foo, S, i, m); <|fim▁hole|>fn main() { use foo::{S, m}; S::default().i; //~ ERROR field `i` of struct `foo::S` is private m!(S::default()); // ok }<|fim▁end|>
<|file_name|>tuntap_tests.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011 Mattias Nissler <[email protected]> # # Redistribution and use in source and binary forms, with or without modification, are permitted # provided that the following conditions are met:<|fim▁hole|># 2. Redistributions in binary form must reproduce the above copyright notice, this list of # conditions and the following disclaimer in the documentation and/or other materials provided # with the distribution. # 3. The name of the author may not be used to endorse or promote products derived from this # software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED # TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import argparse import itertools import re import sys import unittest from tuntap.packet_codec import TunPacketCodec, TunAFPacketCodec, TapPacketCodec from tuntap.packet_reader import BlockingPacketSource, SelectPacketSource from tuntap.test_char_dev import TestTunCharDev, TestTapCharDev from tuntap.test_interface import TestTunInterface, TestTapInterface from tuntap.test_ip import TestIp, TestIp6, TestMulticast, TestMulticast6 class FilteringTestSuite(unittest.TestSuite): def __init__(self, filter): super(FilteringTestSuite, self).__init__() self._matcher = re.compile(filter or '.*') def __iter__(self): return itertools.ifilter(lambda test : self._matcher.search(str(test)), super(FilteringTestSuite, self).__iter__()) def loadTestsFromTestCase(testCaseClass, *args, **kwargs): testCaseNames = unittest.getTestCaseNames(testCaseClass, 'test_') return unittest.TestSuite(map(lambda n : testCaseClass(n, *args, **kwargs), testCaseNames)) def main(argv): # Parse the command line. parser = argparse.ArgumentParser(description = 'Run tuntap unit tests.') parser.add_argument('--tests', type = str, nargs = '?', default = None, help = 'tests to run') parser.add_argument('--verbosity', type = int, nargs = '?', default = 2, help = 'verbosity level') options = parser.parse_args(argv[1:]) # Gather tests and run them. loader = unittest.TestLoader() suite = FilteringTestSuite(options.tests) suite.addTests(loadTestsFromTestCase(TestTunCharDev)) suite.addTests(loadTestsFromTestCase(TestTapCharDev)) suite.addTests(loadTestsFromTestCase(TestTunInterface)) suite.addTests(loadTestsFromTestCase(TestTapInterface)) codecs = (TunPacketCodec, TunAFPacketCodec, TapPacketCodec) sources = (SelectPacketSource, BlockingPacketSource) tests = (TestIp, TestIp6, TestMulticast, TestMulticast6) for (test, codec, source) in [ (test, codec, source) for test in tests for codec in codecs for source in sources ]: suite.addTests(loadTestsFromTestCase(test, lambda af, addr: codec(af, addr, source))) runner = unittest.TextTestRunner(stream = sys.stderr, descriptions = True, verbosity = options.verbosity) runner.run(suite) if __name__ == '__main__': main(sys.argv)<|fim▁end|>
# # 1. Redistributions of source code must retain the above copyright notice, this list of # conditions and the following disclaimer.
<|file_name|>admin.py<|end_file_name|><|fim▁begin|><|fim▁hole|>itemtypes_tables.update({ 'lu_int_wms': 'lux_layer_internal_wms', 'lu_ext_wms': 'lux_layer_external_wms', })<|fim▁end|>
from c2cgeoportal_admin.views.layertree import itemtypes_tables
<|file_name|>CreateSpeciesMutation.js<|end_file_name|><|fim▁begin|>import { commitMutation, graphql } from 'react-relay' import { ConnectionHandler } from 'relay-runtime' const mutation = graphql` mutation CreateSpeciesMutation($input: CreateSpeciesInput!) { createSpecies(input: $input) { clientMutationId<|fim▁hole|> id nodeId air temp water soil } } query { ...SpeciesList_query } } } ` let nextClientMutationId = 0 const commit = (environment, { species, viewer }) => new Promise((resolve, reject) => { const clientMutationId = nextClientMutationId++ const variables = { input: { clientMutationId, species: { ...species, authorId: viewer.id, }, }, } return commitMutation(environment, { mutation, variables, onError: (error: Error) => { reject(error) }, onCompleted: (response: Object) => { resolve(response) }, // See https://github.com/facebook/relay/issues/1701#issuecomment-301012344 // and also https://github.com/facebook/relay/issues/1701#issuecomment-300995425 // and also https://github.com/facebook/relay/issues/1701 updater: store => { const payload = store.getRootField('createSpecies') const newEdge = payload.getLinkedRecord('speciesEdge') const storeRoot = store.getRoot() const connection = ConnectionHandler.getConnection( storeRoot, 'SpeciesList_species', { first: 2147483647, orderBy: 'GENUS_ASC' } ) if (connection) { ConnectionHandler.insertEdgeBefore(connection, newEdge) } else { console.error('No connection found') } }, }) }) export default { commit }<|fim▁end|>
speciesEdge { node { authorId
<|file_name|>test_brightbox.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import unittest from libcloud.utils.py3 import httplib from libcloud.loadbalancer.base import Member, Algorithm from libcloud.loadbalancer.drivers.brightbox import BrightboxLBDriver from libcloud.loadbalancer.types import State <|fim▁hole|>from libcloud.test.file_fixtures import LoadBalancerFileFixtures class BrightboxLBTests(unittest.TestCase): def setUp(self): BrightboxLBDriver.connectionCls.conn_classes = (None, BrightboxLBMockHttp) BrightboxLBMockHttp.type = None self.driver = BrightboxLBDriver(*LB_BRIGHTBOX_PARAMS) def test_list_protocols(self): protocols = self.driver.list_protocols() self.assertEqual(len(protocols), 2) self.assertTrue('tcp' in protocols) self.assertTrue('http' in protocols) def test_list_balancers(self): balancers = self.driver.list_balancers() self.assertEqual(len(balancers), 1) self.assertEqual(balancers[0].id, 'lba-1235f') self.assertEqual(balancers[0].name, 'lb1') def test_get_balancer(self): balancer = self.driver.get_balancer(balancer_id='lba-1235f') self.assertEqual(balancer.id, 'lba-1235f') self.assertEqual(balancer.name, 'lb1') self.assertEqual(balancer.state, State.RUNNING) def test_destroy_balancer(self): balancer = self.driver.get_balancer(balancer_id='lba-1235f') self.assertTrue(self.driver.destroy_balancer(balancer)) def test_create_balancer(self): members = [Member('srv-lv426', None, None)] balancer = self.driver.create_balancer(name='lb2', port=80, protocol='http', algorithm=Algorithm.ROUND_ROBIN, members=members) self.assertEqual(balancer.name, 'lb2') self.assertEqual(balancer.port, 80) self.assertEqual(balancer.state, State.PENDING) def test_balancer_list_members(self): balancer = self.driver.get_balancer(balancer_id='lba-1235f') members = balancer.list_members() self.assertEqual(len(members), 1) self.assertEqual(members[0].balancer, balancer) self.assertEqual('srv-lv426', members[0].id) def test_balancer_attach_member(self): balancer = self.driver.get_balancer(balancer_id='lba-1235f') member = balancer.attach_member(Member('srv-kg983', ip=None, port=None)) self.assertEqual(member.id, 'srv-kg983') def test_balancer_detach_member(self): balancer = self.driver.get_balancer(balancer_id='lba-1235f') member = Member('srv-lv426', None, None) self.assertTrue(balancer.detach_member(member)) class BrightboxLBMockHttp(MockHttpTestCase): fixtures = LoadBalancerFileFixtures('brightbox') def _token(self, method, url, body, headers): if method == 'POST': return self.response(httplib.OK, self.fixtures.load('token.json')) def _1_0_load_balancers(self, method, url, body, headers): if method == 'GET': return self.response(httplib.OK, self.fixtures.load('load_balancers.json')) elif method == 'POST': body = 
self.fixtures.load('load_balancers_post.json') return self.response(httplib.ACCEPTED, body) def _1_0_load_balancers_lba_1235f(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('load_balancers_lba_1235f.json') return self.response(httplib.OK, body) elif method == 'DELETE': return self.response(httplib.ACCEPTED, '') def _1_0_load_balancers_lba_1235f_add_nodes(self, method, url, body, headers): if method == 'POST': return self.response(httplib.ACCEPTED, '') def _1_0_load_balancers_lba_1235f_remove_nodes(self, method, url, body, headers): if method == 'POST': return self.response(httplib.ACCEPTED, '') def response(self, status, body): return (status, body, {'content-type': 'application/json'}, httplib.responses[status]) if __name__ == "__main__": sys.exit(unittest.main())<|fim▁end|>
from libcloud.test import MockHttpTestCase from libcloud.test.secrets import LB_BRIGHTBOX_PARAMS
<|file_name|>ProgressBar.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>GUI.Util.ProgressBar<|fim▁end|>
GUI.Util.ProgressBar$1
<|file_name|>errata.py<|end_file_name|><|fim▁begin|># # Copyright (c) 2008--2010 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated<|fim▁hole|># # Implements the errata.* functions for XMLRPC # # common modules imports from spacewalk.common.rhnTranslate import _ from spacewalk.common import rhnFault, rhnFlags, log_debug, log_error # server modules imports from spacewalk.server.rhnLib import parseRPMName from spacewalk.server.rhnHandler import rhnHandler from spacewalk.server import rhnSQL, rhnCapability class Errata(rhnHandler): """ Errata class --- retrieve (via xmlrpc) package errata. """ def __init__(self): rhnHandler.__init__(self) # Exposed Errata functions: self.functions = [] self.functions.append('GetByPackage') # Clients v1- self.functions.append('getPackageErratum') # Clients v2+ self.functions.append('getErrataInfo') # clients v2+ def GetByPackage(self, pkg, osRel): """ Clients v1- Get errata for a package given "n-v-r" format IN: pkg: "n-v-r" (old client call) or [n,v,r] osRel: OS release RET: a hash by errata that applies to this package (ie, newer packages are available). We also limit the scope for a particular osRel. """ if type(pkg) == type(''): # Old client support. pkg = parseRPMName(pkg) log_debug(1, pkg, osRel) # Stuff the action in the headers: transport = rhnFlags.get('outputTransportOptions') transport['X-RHN-Action'] = 'GetByPackage' # now look up the errata if type(pkg[0]) != type(''): log_error("Invalid package name: %s %s" % (type(pkg[0]), pkg[0])) raise rhnFault(30, _("Expected a package name, not: %s") % pkg[0]) #bug#186996:adding synopsis field to advisory info #client side changes are needed to access this data. h = rhnSQL.prepare(""" select distinct e.id errata_id, e.advisory_type errata_type, e.advisory advisory, e.topic topic, e.description description, e.synopsis synopsis from rhnErrata e, rhnPublicChannelFamily pcf, rhnChannelFamilyMembers cfm, rhnErrataPackage ep, rhnChannelPackage cp, rhnChannelErrata ce, rhnDistChannelMap dcm, rhnPackage p where 1=1 and p.name_id = LOOKUP_PACKAGE_NAME(:name) -- map to a channel and p.id = cp.package_id and cp.channel_id = dcm.channel_id and dcm.release = :dist -- map to an errata as well and p.id = ep.package_id and ep.errata_id = e.id -- the errata and the channel have to be linked and ce.channel_id = cp.channel_id -- and the channel has to be public and cp.channel_id = cfm.channel_id and cfm.channel_family_id = pcf.channel_family_id -- and get the erratum and e.id = ce.errata_id """) h.execute(name = pkg[0], dist = str(osRel)) ret = [] # sanitize the results for display in the clients while 1: row = h.fetchone_dict() if row is None: break for k in row.keys(): if row[k] is None: row[k] = "N/A" ret.append(row) return ret def getPackageErratum(self, system_id, pkg): """ Clients v2+ - Get errata for a package given [n,v,r,e,a,...] format Sing-along: You say erratum(sing), I say errata(pl)! :) IN: pkg: [n,v,r,e,s,a,ch,...] 
RET: a hash by errata that applies to this package """ log_debug(5, system_id, pkg) if type(pkg) != type([]) or len(pkg) < 7: log_error("Got invalid package specification: %s" % str(pkg)) raise rhnFault(30, _("Expected a package, not: %s") % pkg) # Authenticate and decode server id. self.auth_system(system_id) # log the entry log_debug(1, self.server_id, pkg) # Stuff the action in the headers: transport = rhnFlags.get('outputTransportOptions') transport['X-RHN-Action'] = 'getPackageErratum' name, ver, rel, epoch, arch, size, channel = pkg[:7] if epoch in ['', 'none', 'None']: epoch = None # XXX: also, should arch/size/channel ever be used? #bug#186996:adding synopsis field to errata info #client side changes are needed to access this data. h = rhnSQL.prepare(""" select distinct e.id errata_id, e.advisory_type errata_type, e.advisory advisory, e.topic topic, e.description description, e.synopsis synopsis from rhnServerChannel sc, rhnChannelPackage cp, rhnChannelErrata ce, rhnErrata e, rhnErrataPackage ep, rhnPackage p where p.name_id = LOOKUP_PACKAGE_NAME(:name) and p.evr_id = LOOKUP_EVR(:epoch, :ver, :rel) -- map to a channel and p.id = cp.package_id -- map to an errata as well and p.id = ep.package_id and ep.errata_id = e.id -- the errata and the channel have to be linked and e.id = ce.errata_id and ce.channel_id = cp.channel_id -- and the server has to be subscribed to the channel and cp.channel_id = sc.channel_id and sc.server_id = :server_id """) # " emacs sucks h.execute(name = name, ver = ver, rel = rel, epoch = epoch, server_id = str(self.server_id)) ret = [] # sanitize the results for display in the clients while 1: row = h.fetchone_dict() if row is None: break for k in row.keys(): if row[k] is None: row[k] = "N/A" ret.append(row) return ret # I don't trust this errata_id business, but chip says "trust me" def getErrataInfo(self, system_id, errata_id): log_debug(5, system_id, errata_id) # Authenticate the server certificate self.auth_system(system_id) # log this thing log_debug(1, self.server_id, errata_id) client_caps = rhnCapability.get_client_capabilities() log_debug(3,"Client Capabilities", client_caps) multiarch = 0 cap_info = None if client_caps and client_caps.has_key('packages.update'): cap_info = client_caps['packages.update'] if cap_info and cap_info['version'] > 1: multiarch = 1 statement = """ select distinct pn.name, pe.epoch, pe.version, pe.release, pa.label arch from rhnPackageName pn, rhnPackageEVR pe, rhnPackage p, rhnPackageArch pa, rhnChannelPackage cp, rhnServerChannel sc, rhnErrataPackage ep where ep.errata_id = :errata_id and ep.package_id = p.id and p.name_id = pn.id and p.evr_id = pe.id and p.package_arch_id = pa.id and sc.server_id = :server_id and sc.channel_id = cp.channel_id and cp.package_id = p.id """ h = rhnSQL.prepare(statement) h.execute(errata_id = errata_id, server_id = self.server_id) packages = h.fetchall_dict() ret = [] if not packages: return [] for package in packages: if package['name'] is not None: if package['epoch'] is None: package['epoch'] = "" pkg_arch = '' if multiarch: pkg_arch = package['arch'] or '' ret.append([package['name'], package['version'], package['release'], package['epoch'], pkg_arch]) return ret #----------------------------------------------------------------------------- if __name__ == "__main__": print "You can not run this module by itself" import sys; sys.exit(-1) #-----------------------------------------------------------------------------<|fim▁end|>
# in this software or its documentation.
<|file_name|>metering.py<|end_file_name|><|fim▁begin|>import ceilometerclient.client as clclient import logging log = logging.getLogger(__name__) class Metering: '''Wrapper for the OpenStack Metering service (Ceilometer)''' def __init__(self, conf): creds = self._get_creds(conf) self.ceilo = clclient.get_client(2, **creds) def _get_creds(self, conf):<|fim▁hole|> d['os_password'] = conf.get("environment", "OS_PASSWORD") d['os_auth_url'] = conf.get("environment", "OS_AUTH_URL") d['os_tenant_name'] = conf.get("environment", "OS_TENANT_NAME") return d def meter_list(self, query=None): return self.ceilo.meters.list()<|fim▁end|>
d = {} d['os_username'] = conf.get("environment", "OS_USERNAME")
<|file_name|>update_service.go<|end_file_name|><|fim▁begin|>// Copyright 2016-2021 The Libsacloud Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>// limitations under the License. package icon import ( "context" "fmt" "github.com/sacloud/libsacloud/v2/sacloud" ) func (s *Service) Update(req *UpdateRequest) (*sacloud.Icon, error) { return s.UpdateWithContext(context.Background(), req) } func (s *Service) UpdateWithContext(ctx context.Context, req *UpdateRequest) (*sacloud.Icon, error) { if err := req.Validate(); err != nil { return nil, err } client := sacloud.NewIconOp(s.caller) current, err := client.Read(ctx, req.ID) if err != nil { return nil, fmt.Errorf("reading Icon[%s] failed: %s", req.ID, err) } params, err := req.ToRequestParameter(current) if err != nil { return nil, fmt.Errorf("processing request parameter failed: %s", err) } return client.Update(ctx, req.ID, params) }<|fim▁end|>
// See the License for the specific language governing permissions and
<|file_name|>sign.py<|end_file_name|><|fim▁begin|>import chainer from chainer import backend from chainer import utils def sign(x): """Elementwise sign function. For a given input :math:`x`, this function returns :math:`sgn(x)` defined as .. math:: sgn(x) = \\left \\{ \\begin{array}{cc} -1 & {\\rm if~x < 0} \\\\ 0 & {\\rm if~x = 0} \\\\ 1 & {\\rm if~x > 0} \\\\ \\end{array} \\right. .. note:: The gradient of this function is ``None`` everywhere and therefore unchains the computational graph. Args: x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable for which the sign is computed. Returns: ~chainer.Variable: Output variable.<|fim▁hole|> x = x.array xp = backend.get_array_module(x) return chainer.as_variable(utils.force_array(xp.sign(x)))<|fim▁end|>
""" if isinstance(x, chainer.variable.Variable):
<|file_name|>Matches.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * Copyright (c) 2008,2010 itemis AG (http://www.itemis.eu) and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at<|fim▁hole|>package org.eclipse.emf.mwe2.runtime; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) public @interface Matches { String value(); }<|fim▁end|>
* http://www.eclipse.org/legal/epl-v10.html * *******************************************************************************/
<|file_name|>currie.py<|end_file_name|><|fim▁begin|>""" Currie -- you can do magic. Goal here is to have a thread launching piglets. And curio controlling the operation. Aim to use joy to control which widget loop to use. No piglets known to be harmed with this code. So there is a pig farm and piglets running everywhere. And currie doing magic. """ from collections import deque import curio from pathlib import Path import inspect from karmapi import hush, base from tkinter import Toplevel from karmapi import pigfarm def main(): import argparse <|fim▁hole|> parser.add_argument('--gallery', nargs='*', default=['.', '../gallery']) parser.add_argument('--images', default=False, action='store_true') parser.add_argument('--thresh', type=float, default=10.0) parser.add_argument('--monitor', action='store_true') parser.add_argument('--nomon', action='store_false', default=True) parser.add_argument('--words', default='diceware.wordlist.asc') parser.add_argument( '--files', nargs='+', default=__file__) parser.add_argument('--nomick', action='store_true') parser.add_argument('--sense', action='store_true', help="if you have a sense hat") parser.add_argument('--path') args = parser.parse_args() # import from pig stuff here, after talking to joy from karmapi import joy joy.set_backend(args.pig) from karmapi import piglet from karmapi import widgets from karmapi import sonogram # what's this doing here? #import tkinter farm = pigfarm.PigFarm() print('building farm') farm.status() from karmapi.mclock2 import GuidoClock from karmapi.tankrain import TankRain from karmapi import diceware as dice from karmapi import talk if args.monitor: from karmapi import milk farm.add(milk.Curio) images = [ dict(image='climate_karma_pi_and_jupyter.png', title=''), dict(image='gil_ly_.png', title=''), dict(image='princess_cricket.jpg', title='Princess Cricket'), dict(image='fork_in_road.jpg', title='Fork in the Road'), dict(image='tree_of_hearts.jpg', title='Tree of Hearts'), #dict(image='chess.jpg', title='Branching'), dict(image='lock.jpg', title='Global Interpreter Lock'), dict(image='air_water.jpg', title='async def(): await run()'), dict(image='venus.jpg', title='Jupyter')] from karmapi import sunny, noddy farm.files = args.files print('galleries', args.gallery) im_info = dict(galleries=args.gallery) if args.images: for im in images: im_info.update(im) farm.add(piglet.Image, im_info.copy()) words = Path(args.words) if words.exists(): words = words.open() else: words = None data = None if args.path: path = Path(args.path) if path.exists(): data = base.load_folder(path) farm.add(noddy.Magic, dict(data=data)) #farm.add(talk.Talk) farm.add(dice.StingingBats, dict(words=words)) farm.add(TankRain) #farm.add(sunny.Sunspot) farm.add(sonogram.SonoGram) farm.add(piglet.XKCD) farm.add(widgets.InfinitySlalom) from karmapi import prime farm.add(prime.Prime) farm.add(GuidoClock) if args.sense: from karmapi import sense farm.add(sense.WeatherHat) farm.add(sense.OrientHat) # add a couple of micks to the Farm if args.wave: farm.add_mick(hush.Connect(hush.open_wave(args.wave))) else: if not args.nomick: farm.add_mick(hush.Connect()) farm.add_mick(hush.Wave(mode='square')) farm.add_mick(hush.Wave()) farm.status() curio.run(farm.run(), with_monitor=args.nomon) if __name__ == '__main__': main()<|fim▁end|>
parser = argparse.ArgumentParser() parser.add_argument('--pig', default='tk') parser.add_argument('--wave')
<|file_name|>webpack.dev.js<|end_file_name|><|fim▁begin|>var webpack = require('webpack'); var helpers = require('./helpers'); var path = require('path'); var ExtractTextPlugin = require('extract-text-webpack-plugin'); var HtmlWebpackPlugin = require('html-webpack-plugin'); const ENV = process.env.NODE_ENV = process.env.ENV = 'development'; module.exports = { devtool: 'cheap-module-eval-source-map', entry: { 'polyfills': './app/polyfills.ts', 'vendor': './app/vendor.ts', 'app': './app/boot.ts' }, output: { path: helpers.root('dist'), publicPath: 'http://localhost:8084/', filename: '[name].js', chunkFilename: '[id].chunk.js' }, resolve: { extensions: ['', '.ts', '.js'] }, module: { loaders: [{ test: /\.ts$/, exclude: path.resolve(__dirname, "node_modules"), loaders: ['awesome-typescript-loader', 'angular2-template-loader', 'angular2-router-loader'] }, { test: /\.html$/, loader: 'html' }, { test: /\.(png|jpe?g|gif|ico)$/, loader: 'file-loader?name=images/[name].[ext]' }, { test: /\.woff(\?v=\d+\.\d+\.\d+)?$/, loader: 'url-loader?limit=10000&mimetype=application/font-woff' }, { test: /\.woff2(\?v=\d+\.\d+\.\d+)?$/, loader: 'url-loader?limit=10000&mimetype=application/font-woff' }, { test: /\.ttf(\?v=\d+\.\d+\.\d+)?$/, loader: 'url-loader?limit=10000&mimetype=application/octet-stream' }, { test: /\.eot(\?v=\d+\.\d+\.\d+)?$/, loader: 'file-loader' }, { test: /\.svg(\?v=\d+\.\d+\.\d+)?$/, loader: 'url-loader?limit=10000&mimetype=image/svg+xml' }, { // site wide css (excluding all css under the app dir) test: /\.css$/, exclude: helpers.root('app'), loader: ExtractTextPlugin.extract('style', 'css?sourceMap') }, { // included styles under the app directory - these are for styles included // with styleUrls test: /\.css$/, include: helpers.root('app'), loader: 'raw' }, { test: /\.scss$/, include: helpers.root('node_modules'), loader: ExtractTextPlugin.extract('style-loader', 'css-loader') }, ] }, plugins: [ new webpack.optimize.CommonsChunkPlugin({ name: ['app', 'vendor', 'polyfills'] }), new ExtractTextPlugin('[name].css'), new HtmlWebpackPlugin({ template: 'config/index.html' }), new webpack.DefinePlugin({ 'process.env.NODE_ENV': JSON.stringify('development'), 'process.env.APP_VERSION': JSON.stringify(process.env.npm_package_version), }) ], <|fim▁hole|> stats: 'minimal', } };<|fim▁end|>
devServer: { historyApiFallback: true,
<|file_name|>0011_document_html_text_string.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-02-25 01:59 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('pdfapp', '0010_auto_20170225_0034'), ] operations = [ migrations.AddField( model_name='document',<|fim▁hole|><|fim▁end|>
name='html_text_string', field=models.TextField(blank=True, null=True), ), ]
<|file_name|>path-lookahead.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass #![warn(unused)]<|fim▁hole|> return (<T as ToString>::to_string(&arg)); //~WARN unnecessary parentheses around `return` value } fn no_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `no_parens` return <T as ToString>::to_string(&arg); } fn main() { }<|fim▁end|>
// Parser test for #37765 fn with_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `with_parens`
<|file_name|>introspection.py<|end_file_name|><|fim▁begin|>from django.contrib.gis.gdal import OGRGeomType from django.db.backends.postgresql.introspection import DatabaseIntrospection class GeoIntrospectionError(Exception): pass class PostGISIntrospection(DatabaseIntrospection): # Reverse dictionary for PostGIS geometry types not populated until # introspection is actually performed. postgis_types_reverse = {} ignored_tables = DatabaseIntrospection.ignored_tables + [ 'geography_columns', 'geometry_columns', 'raster_columns', 'spatial_ref_sys',<|fim▁hole|> ] def get_postgis_types(self): """ Return a dictionary with keys that are the PostgreSQL object identification integers for the PostGIS geometry and/or geography types (if supported). """ field_types = [ ('geometry', 'GeometryField'), # The value for the geography type is actually a tuple # to pass in the `geography=True` keyword to the field # definition. ('geography', ('GeometryField', {'geography': True})), ] postgis_types = {} # The OID integers associated with the geometry type may # be different across versions; hence, this is why we have # to query the PostgreSQL pg_type table corresponding to the # PostGIS custom data types. oid_sql = 'SELECT "oid" FROM "pg_type" WHERE "typname" = %s' with self.connection.cursor() as cursor: for field_type in field_types: cursor.execute(oid_sql, (field_type[0],)) for result in cursor.fetchall(): postgis_types[result[0]] = field_type[1] return postgis_types def get_field_type(self, data_type, description): if not self.postgis_types_reverse: # If the PostGIS types reverse dictionary is not populated, do so # now. In order to prevent unnecessary requests upon connection # initialization, the `data_types_reverse` dictionary is not updated # with the PostGIS custom types until introspection is actually # performed -- in other words, when this function is called. self.postgis_types_reverse = self.get_postgis_types() self.data_types_reverse.update(self.postgis_types_reverse) return super().get_field_type(data_type, description) def get_geometry_type(self, table_name, geo_col): """ The geometry type OID used by PostGIS does not indicate the particular type of field that a geometry column is (e.g., whether it's a PointField or a PolygonField). Thus, this routine queries the PostGIS metadata tables to determine the geometry type. """ with self.connection.cursor() as cursor: try: # First seeing if this geometry column is in the `geometry_columns` cursor.execute('SELECT "coord_dimension", "srid", "type" ' 'FROM "geometry_columns" ' 'WHERE "f_table_name"=%s AND "f_geometry_column"=%s', (table_name, geo_col)) row = cursor.fetchone() if not row: raise GeoIntrospectionError except GeoIntrospectionError: cursor.execute('SELECT "coord_dimension", "srid", "type" ' 'FROM "geography_columns" ' 'WHERE "f_table_name"=%s AND "f_geography_column"=%s', (table_name, geo_col)) row = cursor.fetchone() if not row: raise Exception('Could not find a geometry or geography column for "%s"."%s"' % (table_name, geo_col)) # OGRGeomType does not require GDAL and makes it easy to convert # from OGC geom type name to Django field. field_type = OGRGeomType(row[2]).django # Getting any GeometryField keyword arguments that are not the default. dim = row[0] srid = row[1] field_params = {} if srid != 4326: field_params['srid'] = srid if dim != 2: field_params['dim'] = dim return field_type, field_params<|fim▁end|>
'raster_overviews',
<|file_name|>_debugger_case_breakpoint_remote_no_import.py<|end_file_name|><|fim▁begin|>if __name__ == '__main__':<|fim▁hole|> if root_dirname not in sys.path: sys.path.append(root_dirname) print('before pydevd.settrace') breakpoint(port=port) # Set up through custom sitecustomize.py print('after pydevd.settrace') print('TEST SUCEEDED!')<|fim▁end|>
import os import sys port = int(sys.argv[1]) root_dirname = os.path.dirname(os.path.dirname(__file__))
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "battleground.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)<|fim▁end|>
<|file_name|>BvMT+JZAM.text.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
__d("UPoCs",function(o,e,s){s.exports="Here is brisk demo!"});
<|file_name|>index.test.js<|end_file_name|><|fim▁begin|>// @flow import type { ConfigSparseWithSource } from '../../'; import configResolve from '../'; type Fixture = { name: string, in: ConfigSparseWithSource, }; const fixtures: Fixture[] = [ { name: '01 empty input config', in: {}, }, { name: '02 empty input config with configFile', in: { configFile: '/foo/bar/package.json', }, }, { name: '03 empty input config with rootDir', in: { rootDir: '/foo/bar', }, }, { name: '04 empty input config with both configFile and rootDir', in: { configFile: '/a/b/c/package.json', rootDir: '/foo/bar', }, }, ]; <|fim▁hole|> expect(received).toMatchSnapshot(); }); }); });<|fim▁end|>
describe('lib/config/resolve', () => { fixtures.forEach((fixture) => { it(fixture.name, () => { const received = configResolve(fixture.in);
<|file_name|>ws_util.py<|end_file_name|><|fim▁begin|>""" Some helper functions for workspace stuff """ import logging import re import biokbase import biokbase.workspace from biokbase.workspace import client as WorkspaceClient g_log = logging.getLogger(__name__) # regex for parsing out workspace_id and object_id from # a "ws.{workspace}.{object}" string ws_regex = re.compile('^ws\.(?P<wsid>\d+)\.obj\.(?P<objid>\d+)') # regex for parsing out a user_id from a token user_id_regex = re.compile('^un=(?P<user_id>\w+)\|') # Exception for a malformed workspace ID see regex above class BadWorkspaceID(Exception): pass # Exception for a workspace object not found see regex above class BadWorkspaceID(Exception): pass class PermissionsError(WorkspaceClient.ServerError): """Raised if user does not have permission to access the workspace. """ @staticmethod def is_permissions_error(err): """Try to guess if the error string is a permission-denied error for the narrative (i.e. the workspace the narrative is in). """ pat = re.compile("\s*[Uu]ser \w+ may not \w+ workspace.*") return pat.match(err) is not None def __init__(self, name=None, code=None, message=None, **kw): WorkspaceClient.ServerError.__init__(self, name, code, message, **kw) # List of fields returned by the list_workspace_objects function list_ws_obj_fields = ['id','type','moddate','instance','command', 'lastmodifier','owner','workspace','ref','chsum', 'metadata','objid'] # The list_workspace_objects method has been deprecated, the # list_objects method is the current primary method for fetching # objects, and has a different field list list_objects_fields = ['objid', 'name', 'type', 'save_date', 'ver', 'saved_by', 'wsid', 'workspace', 'chsum', 'size', 'meta'] obj_field = dict(zip(list_objects_fields,range(len(list_objects_fields)))) # object type for a project tag object ws_tag_type = 'KBaseNarrative.Metadata' # object type for a project tag object ws_narrative_type = 'KBaseNarrative.Narrative' # object name for project tag ws_tag = {'project' : '_project'} def get_wsobj_meta(wsclient, objtype=ws_narrative_type, ws_id=None): """ Takes an initialized workspace client. Defaults to searching for Narrative types in any workspace that the token has at least read access to. If the ws field is specified then it will return the workspace metadata for only the workspace specified Returns a dictionary of object descriptions - the key is a workspace id of the form "ws.{workspace_id}.obj.{object_id}" and the values are dictionaries keyed on the list_ws_obj_field list above. 
Raises: PermissionsError, if access is denied """ try: if ws_id is None: res = wsclient.list_objects({'type' : objtype, 'includeMetadata' : 1}) else: res = wsclient.list_objects({'type' : objtype, 'includeMetadata' : 1, 'ids' : [ws_id] }) except WorkspaceClient.ServerError, err: if PermissionsError.is_permissions_error(err.message): raise PermissionsError(name=err.name, code=err.code, message=err.message, data=err.data) my_narratives = {} for obj in res: my_narratives["ws.%s.obj.%s" % (obj[obj_field['wsid']],obj[obj_field['objid']])] = dict(zip(list_objects_fields,obj)) return my_narratives def get_wsid(wsclient, workspace): """ When given a workspace name, returns the numeric ws_id """ try: ws_meta = wsclient.get_workspace_info({'workspace' : workspace}); except WorkspaceClient.ServerError, e: if e.message.find('not found') >= 0 or e.message.find('No workspace with name') >= 0: return(None) else: raise e return( ws_meta[0]) def alter_workspace_metadata(wsclient, ref, new_metadata={}, ws_id=None): """ This is just a wrapper for the workspace get_objects call. Takes an initialized workspace client and a workspace ID of the form "ws.{ws_id}.obj.{object id}" and returns the following: { 'data' : {actual data contained in the object}, 'metadata' : { a dictionary version of the object metadata }, ... all the fields that are normally returned in a ws ObjectData type } if type is not specified then an extra lookup for object metadata is required, this can be shortcut by passing in the object type """ if ws_id is None and ref is not None: match = ws_regex.match(ref) if not match: raise BadWorkspaceID("%s does not match workspace ID format ws.{workspace id}.obj.{object id}" % ws_id) ws_id = match.group(1) elif ws_id is None and ref is None: raise BadWorkspaceID("No workspace id or object reference given!") wsclient.alter_workspace_metadata({'wsi':{'id':ws_id}, 'new':new_metadata}) def get_wsobj(wsclient, ws_id, objtype=None): """ This is just a wrapper for the workspace get_objects call. Takes an initialized workspace client and a workspace ID of the form "ws.{ws_id}.obj.{object id}" and returns the following: { 'data' : {actual data contained in the object}, 'metadata' : { a dictionary version of the object metadata }, ... all the fields that are normally returned in a ws ObjectData type } if type is not specified then an extra lookup for object metadata is required, this can be shortcut by passing in the object type """ match = ws_regex.match( ws_id) if not match: raise BadWorkspaceID("%s does not match workspace ID format ws.{workspace id}.obj.{object id}" % ws_id) ws = match.group(1) objid = match.group(2) objs = wsclient.get_objects([dict( wsid=ws, objid=objid)]) if len(objs) < 1: raise BadWorkspaceID( "%s could not be found" % ws_id) elif len(objs) > 1: raise BadWorkspaceID( "%s non-unique! Weird!!!" 
% ws_id) res=objs[0] res['metadata'] = dict(zip(list_objects_fields,objs[0]['info'])) return res def delete_wsobj(wsclient, wsid, objid): """ Given a workspace client, and numeric workspace id and object id, delete it returns true on success, false otherwise """ try: wsclient.delete_objects( [{ 'wsid' : wsid, 'objid' : objid }] ) except WorkspaceClient.ServerError, e: raise e # return False return True # Write an object to the workspace, takes the workspace id, an object of the # type workspace.ObjectSaveData # typedef structure { # type_string type; # UnspecifiedObject data; # obj_name name; # obj_id objid; # usermeta meta; # list<ProvenanceAction> provenance; # boolean hidden; # } ObjectSaveData; def rename_wsobj(wsclient, identity, new_name): """ Given an object's identity, change that object's name. """ try: obj_info = wsclient.rename_object({ 'obj' : identity, 'new_name' : new_name }) except WorkspaceClient.ServerError, e: raise e return dict(zip(list_objects_fields, obj_info)) def put_wsobj(wsclient, ws_id, obj): try: ws_meta = wsclient.save_objects({ 'id' : ws_id, 'objects' : [obj] }) except: raise return dict(zip(list_objects_fields,ws_meta[0])) # Tag a workspace as a project, if there is an error, let it propagate up def check_project_tag(wsclient, ws_id): try: tag = wsclient.get_object_info( [{ 'wsid' : ws_id, 'name' : ws_tag['project'] }], 0); except WorkspaceClient.ServerError, e: # If it is a not found error, create it, otherwise reraise if e.message.find('not found') >= 0 or e.message.find('No object with name') >= 0: obj_save_data = { 'name' : ws_tag['project'], 'type' :ws_tag_type, 'data' : { 'description' : 'Tag! You\'re a project!'}, 'meta' : {}, 'provenance' : [],<|fim▁hole|> ws_meta = wsclient.save_objects( { 'id' : ws_id, 'objects' : [obj_save_data]}); else: raise e return True def get_user_id(wsclient): """Grab the userid from the token in the wsclient object This is a pretty brittle way to do things, and will need to be changed, eventually. """ try: token = wsclient._headers.get('AUTHORIZATION', None) if token is None: g_log.error("auth.error No 'AUTHORIZATION' key found " "in client headers: '{}'" .format(wsclient._headers)) return None match = user_id_regex.match(token) if match: return match.group(1) else: return None except Exception, e: g_log.error("Cannot get userid: {}".format(e)) raise e def check_homews(wsclient, user_id = None): """ Helper routine to make sure that the user's home workspace is built. Putting it here so that when/if it changes we only have a single place to change things. Takes a wsclient, and if it is authenticated, extracts the user_id from the token and will check for the existence of the home workspace and create it if necessary. Will pass along any exceptions. Will also make sure that it is tagged with a workspace_meta object named "_project" returns the workspace name and workspace id as a tuple Note that parsing the token from the wsclient object is brittle and should be changed! 
""" if user_id is None: user_id = get_user_id(wsclient) try: homews = "%s:home" % user_id workspace_identity = { 'workspace' : homews } ws_meta = wsclient.get_workspace_info( workspace_identity) except WorkspaceClient.ServerError, e: # If it is a not found error, create it, otherwise reraise if e.message.find('not found') >= 0 or e.message.find('No workspace with name') >= 0: ws_meta = wsclient.create_workspace({ 'workspace' : homews, 'globalread' : 'n', 'description' : 'User home workspace'}) elif e.message.find('deleted') >= 0: wsclient.undelete_workspace( { 'workspace' : homews}) ws_meta = wsclient.get_workspace_info( workspace_identity) else: raise e if ws_meta: # check_project_tag(wsclient, ws_meta[0]) # return the textual name and the numeric ws_id return ws_meta[1],ws_meta[0] else: raise Exception('Unable to find or create or undelete home workspace: %s' % homews)<|fim▁end|>
'hidden' : 1}
<|file_name|>ExitProductSortingHolder.java<|end_file_name|><|fim▁begin|>package ca.ulaval.glo2004.Domain.Matrix; import ca.ulaval.glo2004.Domain.StationExitPoint; import java.io.Serializable; public class ExitProductSortingHolder implements Serializable{ public StationExitPoint exitPoint; public double value;<|fim▁hole|> } }<|fim▁end|>
public ExitProductSortingHolder(StationExitPoint exitPoint, double value){ this.exitPoint = exitPoint; this.value = value;
<|file_name|>test_consumer_groups.py<|end_file_name|><|fim▁begin|>import json import unittest import mock from django.http import HttpResponseBadRequest from base import (assert_auth_CREATE, assert_auth_READ, assert_auth_UPDATE, assert_auth_DELETE, assert_auth_EXECUTE) from pulp.server.exceptions import InvalidValue, MissingResource, MissingValue, OperationPostponed from pulp.server.managers.consumer.group import query from pulp.server.webservices.views import util from pulp.server.webservices.views.consumer_groups import (serialize, ConsumerGroupAssociateActionView, ConsumerGroupBindingView, ConsumerGroupBindingsView, ConsumerGroupContentActionView, ConsumerGroupResourceView, ConsumerGroupSearchView, ConsumerGroupUnassociateActionView, ConsumerGroupView,) class TestconsumerGroupView(unittest.TestCase): """ Test consumer groups view. """ @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_READ()) @mock.patch( 'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder') @mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection') def test_get_all_consumer_groups(self, mock_collection, mock_resp): """ Test the consumer groups retrieval. """ consumer_mock = mock.MagicMock() resp = [{'id': 'foo', 'display_name': 'bar'}] consumer_mock.find.return_value = resp mock_collection.return_value = consumer_mock request = mock.MagicMock() consumer_group = ConsumerGroupView() response = consumer_group.get(request) expected_cont = [{'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'}] mock_resp.assert_called_once_with(expected_cont) self.assertTrue(response is mock_resp.return_value) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.generate_redirect_response') @mock.patch( 'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder') @mock.patch('pulp.server.webservices.views.consumer_groups.factory') def test_create_consumer_group(self, mock_factory, mock_resp, mock_redirect): """ Test consumer group creation. """ resp = {'id': 'foo', 'display_name': 'bar'} expected_cont = {'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'} request = mock.MagicMock() request.body = json.dumps({'id': 'foo', 'display_name': 'bar'}) mock_factory.consumer_group_manager.return_value.create_consumer_group.return_value = resp consumer_group = ConsumerGroupView() response = consumer_group.post(request) mock_resp.assert_called_once_with(expected_cont) mock_redirect.assert_called_once_with(mock_resp.return_value, expected_cont['_href']) self.assertTrue(response is mock_redirect.return_value) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) def test_create_consumer_group_invalid_param(self): """ Test consumer group creation with invalid parameters. 
""" request = mock.MagicMock() request.body = json.dumps({'id': 'foo', 'display_name': 'bar', 'invalid_param': 'some'}) consumer_group = ConsumerGroupView() try: response = consumer_group.post(request) except InvalidValue, response: pass else: raise AssertionError("Invalidvalue should be raised with invalid options") self.assertEqual(response.http_status_code, 400) self.assertEqual(response.error_data['property_names'], ['invalid_param']) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) def test_create_consumer_group_missing_param(self): """ Test consumer group creation with missing required group id. """ request = mock.MagicMock() request.body = json.dumps({'display_name': 'bar'}) consumer_group = ConsumerGroupView() try: response = consumer_group.post(request) except MissingValue, response: pass else: raise AssertionError("MissingValue should be raised with missing options") self.assertEqual(response.http_status_code, 400) self.assertEqual(response.error_data['property_names'], ['id']) class TestconsumerGroupResourceView(unittest.TestCase): """ Test consumer groups resource view. """ @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_DELETE()) @mock.patch('pulp.server.webservices.views.consumer_groups.generate_json_response') @mock.patch('pulp.server.webservices.views.consumer_groups.factory') def test_delete_consumer_group_resource(self, mock_factory, mock_resp): """ Test consumer group delete resource. """ mock_group_manager = mock.MagicMock() mock_factory.consumer_group_manager.return_value = mock_group_manager mock_group_manager.delete_consumer_group.return_value = None request = mock.MagicMock() consumer_group_resource = ConsumerGroupResourceView() response = consumer_group_resource.delete(request, 'test-group') mock_group_manager.delete_consumer_group.assert_called_once_with('test-group') mock_resp.assert_called_once_with(None) self.assertTrue(response is mock_resp.return_value) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_READ()) @mock.patch( 'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder') @mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection') def test_get_consumer_group_resource(self, mock_collection, mock_resp): """ Test single consumer group retrieval. """ consumer_mock = mock.MagicMock() consumer_mock.find_one.return_value = {'id': 'foo'} mock_collection.return_value = consumer_mock request = mock.MagicMock() consumer_group = ConsumerGroupResourceView() response = consumer_group.get(request, 'foo') expected_cont = {'id': 'foo', '_href': '/v2/consumer_groups/foo/'} mock_resp.assert_called_once_with(expected_cont) self.assertTrue(response is mock_resp.return_value) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_READ()) @mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection') def test_get_invalid_consumer_group_resource(self, mock_collection): """ Test nonexistent consumer group retrieval. 
""" mock_collection.return_value.find_one.return_value = None request = mock.MagicMock() consumer_group = ConsumerGroupResourceView() try: response = consumer_group.get(request, 'nonexistent_id') except MissingResource, response: pass else: raise AssertionError("MissingResource should be raised with nonexistent_group") self.assertEqual(response.http_status_code, 404) self.assertEqual(response.error_data['resources'], {'consumer_group': 'nonexistent_id'}) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_UPDATE()) @mock.patch( 'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder') @mock.patch('pulp.server.webservices.views.consumer_groups.factory') def test_update_consumer_group(self, mock_factory, mock_resp): """ Test consumer group update. """ resp = {'id': 'foo', 'display_name': 'bar'} expected_cont = {'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'} request = mock.MagicMock() request.body = json.dumps({'display_name': 'bar'}) mock_factory.consumer_group_manager.return_value.update_consumer_group.return_value = resp consumer_group = ConsumerGroupResourceView() response = consumer_group.put(request, 'foo') mock_resp.assert_called_once_with(expected_cont) self.assertTrue(response is mock_resp.return_value) class TestConsumerGroupSearchView(unittest.TestCase): """ Tests for ConsumerGroupSearchView. """ def test_class_attributes(self): """ Ensure that class attributes are set correctly. """ consumer_group_search = ConsumerGroupSearchView() self.assertTrue(isinstance(consumer_group_search.manager, query.ConsumerGroupQueryManager)) self.assertEqual(consumer_group_search.response_builder, util.generate_json_response_with_pulp_encoder) self.assertEqual(consumer_group_search.serializer, serialize) class TestConsumerGroupAssociateActionView(unittest.TestCase): """ Tests consumer group membership. """ @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_EXECUTE()) @mock.patch( 'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder') @mock.patch('pulp.server.webservices.views.consumer_groups.factory') def test_cons_group_association_view(self, mock_factory, mock_resp): """ Test consumer group associate a consumer. """ grp = {'id': 'my-group', 'consumer_ids': ['c1']} mock_factory.consumer_group_manager.return_value.associate.return_value = 'ok' mock_factory.consumer_group_query_manager.return_value.get_group.return_value = grp request = mock.MagicMock() request.body = json.dumps({'criteria': {'filters': {'id': 'c1'}}}) consumer_group_associate = ConsumerGroupAssociateActionView() response = consumer_group_associate.post(request, 'my-group') mock_resp.assert_called_once_with(['c1']) self.assertTrue(response is mock_resp.return_value) class TestConsumerGroupUnassociateActionView(unittest.TestCase): """ Tests consumer group membership. """ @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_EXECUTE()) @mock.patch( 'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder') @mock.patch('pulp.server.webservices.views.consumer_groups.factory') def test_cons_group_unassociation_view(self, mock_factory, mock_resp): """ Test consumer group unassociate a consumer. 
""" grp = {'id': 'my-group', 'consumer_ids': []} mock_factory.consumer_group_manager.return_value.unassociate.return_value = 'ok' mock_factory.consumer_group_query_manager.return_value.get_group.return_value = grp request = mock.MagicMock() request.body = json.dumps({'criteria': {'filters': {'id': 'c1'}}}) consumer_group_unassociate = ConsumerGroupUnassociateActionView() response = consumer_group_unassociate.post(request, 'my-group') mock_resp.assert_called_once_with([]) self.assertTrue(response is mock_resp.return_value) class TestConsumerGroupBindingsView(unittest.TestCase): """ Represents consumer group binding. """ @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.factory') @mock.patch('pulp.server.webservices.views.consumer_groups.model.Distributor.objects') @mock.patch('pulp.server.webservices.views.consumer_groups.model.Repository.objects') def test_verify_group_resources_repo(self, mock_repo_qs, mock_dist_qs, mock_factory): """ Test verify group resources with repo missing. """ mock_factory.consumer_group_query_manager.return_value.get_group.return_value = 'test-group' mock_repo_qs().first.return_value = None mock_dist_qs.get_or_404.side_effect = MissingResource request = mock.MagicMock() request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'}) bind_view = ConsumerGroupBindingsView() try: response = bind_view.post(request, 'test-group') except InvalidValue, response: pass else: raise AssertionError("InvalidValue should be raised with nonexistent resources") self.assertEqual(response.http_status_code, 400) self.assertEqual(response.error_data['property_names'], ['repo_id', 'distributor_id']) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.factory') @mock.patch('pulp.server.webservices.views.consumer_groups.model.Distributor.objects') @mock.patch('pulp.server.webservices.views.consumer_groups.model.Repository.objects') def test_verify_group_resources_distributor(self, mock_repo_qs, mock_dist_qs, mock_f): """ Test verify group resources with distributor missing. """ mock_f.consumer_group_query_manager.return_value.get_group.return_value = 'test' mock_repo_qs.first.return_value = 'xxx' mock_dist_qs.get_or_404.side_effect = MissingResource request = mock.MagicMock() request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'}) bind_view = ConsumerGroupBindingsView() try: response = bind_view.post(request, 'test-group') except InvalidValue, response: pass else:<|fim▁hole|> @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.model.Repository.objects') @mock.patch('pulp.server.webservices.views.consumer_groups.model.Distributor.objects') @mock.patch('pulp.server.webservices.views.consumer_groups.factory') def test_verify_group_resources_group(self, mock_f, mock_dist_qs, mock_repo_qs): """ Test verify group resources with group id missing. 
""" mock_f.consumer_group_query_manager.return_value.get_group.side_effect = MissingResource mock_repo_qs.first.return_value = 'xxx' mock_dist_qs.get_or_404.return_value = 'yyy' request = mock.MagicMock() request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'}) bind_view = ConsumerGroupBindingsView() try: response = bind_view.post(request, 'test-group') except MissingResource, response: pass else: raise AssertionError("MissingResource should be raised with nonexistent resources") self.assertEqual(response.http_status_code, 404) self.assertEqual(response.error_data['resources'], {'group_id': 'test-group'}) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.bind') @mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources') def test_create_binding(self, mock_resources, mock_bind): """ Test bind consumer group to a repo. """ mock_resources.return_value = {} request = mock.MagicMock() request.body = json.dumps({'repo_id': 'xxx', 'distributor_id': 'yyy'}) bind_view = ConsumerGroupBindingsView() self.assertRaises(OperationPostponed, bind_view.post, request, 'test-group') bind_args_tuple = ('test-group', 'xxx', 'yyy', True, None, {}) mock_bind.apply_async.assert_called_once_with(bind_args_tuple) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources') def test_create_binding_with_missing_group_id(self, mock_resources): """ Test bind consumer group to a repo when group id missing. """ mock_resources.return_value = {'group_id': 'nonexistent_id'} request = mock.MagicMock() request.body = json.dumps({}) bind_view = ConsumerGroupBindingsView() try: response = bind_view.post(request, 'nonexistent_id') except MissingResource, response: pass else: raise AssertionError("MissingResource should be raised with nonexistent_group") self.assertEqual(response.http_status_code, 404) self.assertEqual(response.error_data['resources'], {'group_id': 'nonexistent_id'}) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources') def test_create_binding_with_missing_repo_id(self, mock_resources): """ Test bind consumer group to a repo when repo id is missing. """ mock_resources.return_value = {'repo_id': 'nonexistent_id'} request = mock.MagicMock() request.body = json.dumps({}) bind_view = ConsumerGroupBindingsView() try: response = bind_view.post(request, 'test-group') except InvalidValue, response: pass else: raise AssertionError("InvalidValue should be raised with nonexistent_repo") self.assertEqual(response.http_status_code, 400) self.assertEqual(response.error_data['property_names'], ['repo_id']) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources') def test_create_binding_with_invalid_param(self, mock_resources): """ Test bind consumer group to a repo witn invalid parameters. 
""" mock_resources.return_value = {'invalid_param': 'foo'} request = mock.MagicMock() request.body = json.dumps({}) bind_view = ConsumerGroupBindingsView() try: response = bind_view.post(request, 'test-group') except InvalidValue, response: pass else: raise AssertionError("Invalidvalue should be raised with invalid options") self.assertEqual(response.http_status_code, 400) self.assertEqual(response.error_data['property_names'], ['invalid_param']) class TestConsumerGroupBindingView(unittest.TestCase): """ Represents a specific consumer group binding. """ @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_DELETE()) @mock.patch('pulp.server.webservices.views.consumer_groups.unbind') @mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources') def test_delete_binding(self, mock_resources, mock_unbind): """ Test consumer group binding removal. """ mock_resources.return_value = {} request = mock.MagicMock() unbind_view = ConsumerGroupBindingView() self.assertRaises(OperationPostponed, unbind_view.delete, request, "consumer_group_id", "repo_id", "distributor_id") unbind_args_tuple = ("consumer_group_id", "repo_id", "distributor_id", {}) mock_unbind.apply_async.assert_called_once_with(unbind_args_tuple) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_DELETE()) @mock.patch('pulp.server.webservices.views.consumer_groups.verify_group_resources') def test_delete_non_existent_binding(self, mock_resources): """ Test consumer group nonexistent binding removal. """ mock_resources.return_value = {'repo_id': 'no_such_repo'} request = mock.MagicMock() unbind_view = ConsumerGroupBindingView() try: response = unbind_view.delete(request, 'test-group', 'no_such_repo', 'dist_id') except MissingResource, response: pass else: raise AssertionError("MissingResource should be raised with missing options") self.assertEqual(response.http_status_code, 404) self.assertEqual(response.error_data['resources'], {'repo_id': 'no_such_repo'}) class TestConsumerGroupContentActionView(unittest.TestCase): """ Test Consumer group content manipulation. """ @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) def test_consumer_group_bad_request_content(self): """ Test consumer group invalid content action. """ request = mock.MagicMock() request.body = json.dumps('') consumer_group_content = ConsumerGroupContentActionView() response = consumer_group_content.post(request, 'my-group', 'no_such_action') self.assertTrue(isinstance(response, HttpResponseBadRequest)) self.assertEqual(response.status_code, 400) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.factory') def test_consumer_group_content_install(self, mock_factory): """ Test consumer group content installation. 
""" mock_factory.consumer_group_manager.return_value.install_content.return_value = 'ok' request = mock.MagicMock() request.body = json.dumps({"units": [], "options": {}}) consumer_group_content = ConsumerGroupContentActionView() self.assertRaises(OperationPostponed, consumer_group_content.post, request, 'my-group', 'install') mock_factory.consumer_group_manager().install_content.assert_called_once_with( 'my-group', [], {}) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.factory') def test_consumer_group_content_update(self, mock_factory): """ Test consumer group content update. """ mock_factory.consumer_group_manager.return_value.update_content.return_value = 'ok' request = mock.MagicMock() request.body = json.dumps({"units": [], "options": {}}) consumer_group_content = ConsumerGroupContentActionView() self.assertRaises(OperationPostponed, consumer_group_content.post, request, 'my-group', 'update') mock_factory.consumer_group_manager().update_content.assert_called_once_with( 'my-group', [], {}) @mock.patch('pulp.server.webservices.views.decorators._verify_auth', new=assert_auth_CREATE()) @mock.patch('pulp.server.webservices.views.consumer_groups.factory') def test_consumer_group_content_uninstall(self, mock_factory): """ Test consumer group content uninstall. """ mock_factory.consumer_group_manager.return_value.uninstall_content.return_value = 'ok' request = mock.MagicMock() request.body = json.dumps({"units": [], "options": {}}) consumer_group_content = ConsumerGroupContentActionView() self.assertRaises(OperationPostponed, consumer_group_content.post, request, 'my-group', 'uninstall') mock_factory.consumer_group_manager().uninstall_content.assert_called_once_with( 'my-group', [], {})<|fim▁end|>
raise AssertionError("InvalidValue should be raised with nonexistent resources") self.assertEqual(response.http_status_code, 400) self.assertEqual(response.error_data['property_names'], ['distributor_id'])
<|file_name|>S15.5.4.12_A11.js<|end_file_name|><|fim▁begin|>// Copyright 2009 the Sputnik authors. All rights reserved.<|fim▁hole|>// This code is governed by the BSD license found in the LICENSE file. /** * The length property of the search method is 1 * * @path ch15/15.5/15.5.4/15.5.4.12/S15.5.4.12_A11.js * @description Checking String.prototype.search.length */ ////////////////////////////////////////////////////////////////////////////// //CHECK#1 if (!(String.prototype.search.hasOwnProperty("length"))) { $ERROR('#1: String.prototype.search.hasOwnProperty("length") return true. Actual: '+String.prototype.search.hasOwnProperty("length")); } // ////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////// //CHECK#2 if (String.prototype.search.length !== 1) { $ERROR('#2: String.prototype.search.length === 1. Actual: '+String.prototype.search.length ); } // //////////////////////////////////////////////////////////////////////////////<|fim▁end|>
<|file_name|>configs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- #---------------------------------------------------------------------------- # ModRana config files handling #---------------------------------------------------------------------------- # Copyright 2012, Martin Kolman # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. #--------------------------------------------------------------------------- import os import shutil from configobj import ConfigObj import logging log = logging.getLogger("core.config") CONFIGS = ["map_config.conf", "user_config.conf"] class Configs(object): def __init__(self, modrana): self.modrana = modrana self.paths = modrana.paths self.userConfig = {} self.mapConfig = {} # check if config files exist self.checkConfigFilesExist() def checkConfigFilesExist(self): """ assure that configuration files are available in the profile folder - provided the default configuration files exist and that the profile folder exists and is writable """ profilePath = self.modrana.paths.getProfilePath() for config in CONFIGS: configPath = os.path.join(profilePath, config) if not os.path.exists(configPath): try: source = os.path.join("data/default_configuration_files", config) log.info(" ** copying default configuration file to profile folder") log.info(" ** from: %s", source) log.info(" ** to: %s", configPath)<|fim▁hole|> except Exception: log.exception("copying default configuration file to profile folder failed") def upgradeConfigFiles(self): """ upgrade config files, if needed """ upgradeCount = 0 profilePath = self.modrana.paths.getProfilePath() log.info("upgrading modRana configuration files in %s", profilePath) # first check the configs actually exist self.checkConfigFilesExist() for config in CONFIGS: # load default config defaultConfigPath = os.path.join("data/default_configuration_files", config) installedConfigPath = os.path.join(profilePath, config) try: defaultRev = int(ConfigObj(defaultConfigPath).get("revision", 0)) installedRev = int(ConfigObj(installedConfigPath).get("revision", 0)) if defaultRev > installedRev: # is the installed config outdated?
log.info('config file %s is outdated, upgrading', config) # rename installed config as the user might have modified it newName = "%s_old_revision_%d" % (config, installedRev) newPath = os.path.join(profilePath, newName) shutil.move(installedConfigPath, newPath) log.info('old config file renamed to %s' % newName) # install the (newer) default config shutil.copy(defaultConfigPath, profilePath) # update upgrade counter upgradeCount += 1 except Exception: log.exception("upgrading config file: %s failed", config) if upgradeCount: log.info("%d configuration files upgraded", upgradeCount) else: log.info("no configuration files needed upgrade") def loadAll(self): """ load all configuration files """ self.loadMapConfig() self.loadUserConfig() def getUserConfig(self): return self.userConfig def loadUserConfig(self): """load the user oriented configuration file.""" path = os.path.join(self.modrana.paths.getProfilePath(), "user_config.conf") try: config = ConfigObj(path) if 'enabled' in config: if config['enabled'] == 'True': self.userConfig = config except Exception: msg = "loading user_config.conf failed, check the syntax\n" \ "and if the config file is present in the modRana profile directory" log.exception(msg) def getMapConfig(self): """ get the "raw" map config """ return self.mapConfig def loadMapConfig(self): """ load the map configuration file """ configVariables = { 'label': 'label', 'url': 'tiles', 'max_zoom': 'maxZoom', 'min_zoom': 'minZoom', 'type': 'type', 'folder_prefix': 'folderPrefix', 'coordinates': 'coordinates', } def allNeededIn(needed, layerDict): """ check if all required values are filled in """ # TODO: optimize this ? for key in needed: if key in layerDict: continue else: return False return True mapConfigPath = os.path.join(self.modrana.paths.getProfilePath(), 'map_config.conf') # check if the map configuration file is installed if not os.path.exists(mapConfigPath): # nothing in profile folder -> try to use the default config log.info("no config in profile folder, using default map layer configuration file") mapConfigPath = os.path.join("data/default_configuration_files", 'map_config.conf') if not os.path.exists(mapConfigPath): # no map layer config available log.info("map layer configuration file not available") return False try: self.mapConfig = ConfigObj(mapConfigPath) except Exception: log.exception("loading map_config.conf failed") return False return True def getUserAgent(self): """return the default modRana User-Agent""" #debugging: # return "Mozilla/5.0 (compatible; MSIE 5.5; Linux)" #TODO: setting from configuration file, CLI & interface return "modRana flexible GPS navigation system (compatible; Linux)"<|fim▁end|>
shutil.copy(source, configPath) log.info(" ** default config file copying DONE")