Dataset schema: two large_string columns.
  prompt      - large_string, lengths 70 to 991k
  completion  - large_string, lengths 0 to 1.02k
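Each row below is a fill-in-the-middle (FIM) record: the prompt wraps one source file in sentinel tokens (`<|file_name|> … <|end_file_name|>`, `<|fim▁begin|>`, `<|fim▁hole|>`, `<|fim▁end|>`) with one span cut out, and the completion is the text that belongs in the hole (possibly empty, hence the minimum length of 0). A minimal sketch of how a record can be spliced back into the full file; the sentinel strings are taken from the rows themselves, while the function name and comments are illustrative only:

```python
def reassemble(prompt: str, completion: str) -> str:
    """Splice a FIM completion back into its prompt (illustrative sketch)."""
    # Drop the file-name header, keeping only the file body.
    body = prompt.split("<|end_file_name|>", 1)[-1]
    # Remove the begin/end wrappers around the body.
    body = body.replace("<|fim▁begin|>", "").replace("<|fim▁end|>", "")
    # Everything before the hole is the prefix, everything after is the suffix.
    prefix, _, suffix = body.partition("<|fim▁hole|>")
    return prefix + completion + suffix
```

An empty completion simply rejoins prefix and suffix unchanged, as in the first record below.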
<|file_name|>config.ts<|end_file_name|><|fim▁begin|>export const CONFIG = { github: { token: process.env.GITHUB_TOKEN<|fim▁hole|> } };<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>''' Provides a command line application for Bokeh. The following subcommands are available: ''' from __future__ import absolute_import def _build_docstring(): global __doc__ from . import subcommands for cls in subcommands.all:<|fim▁hole|> _build_docstring() del _build_docstring<|fim▁end|>
__doc__ += "%8s : %s\n" % (cls.name, cls.help)
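Splicing this completion back into the hole of the Bokeh `__init__.py` prompt above gives the docstring builder in full (the completion's indentation inside the loop is assumed):

```python
def _build_docstring():
    global __doc__
    from . import subcommands
    for cls in subcommands.all:
        __doc__ += "%8s : %s\n" % (cls.name, cls.help)

_build_docstring()
del _build_docstring
```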
<|file_name|>InfSourceSectionParser.py<|end_file_name|><|fim▁begin|>## @file # This file contained the parser for [Sources] sections in INF file # # Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR> # # This program and the accompanying materials are licensed and made available # under the terms and conditions of the BSD License which accompanies this # distribution. The full text of the license may be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. # ''' InfSourceSectionParser ''' ## # Import Modules # import Logger.Log as Logger from Logger import StringTable as ST from Logger.ToolError import FORMAT_INVALID from Parser.InfParserMisc import InfExpandMacro from Library import DataType as DT from Library.Parsing import MacroParser from Library.Misc import GetSplitValueList from Object.Parser.InfCommonObject import InfLineCommentObject from Parser.InfParserMisc import InfParserSectionRoot class InfSourceSectionParser(InfParserSectionRoot): ## InfSourceParser # # def InfSourceParser(self, SectionString, InfSectionObject, FileName): SectionMacros = {} ValueList = [] SourceList = [] StillCommentFalg = False HeaderComments = [] LineComment = None SectionContent = '' for Line in SectionString: SrcLineContent = Line[0] SrcLineNo = Line[1] if SrcLineContent.strip() == '': continue # # Found Header Comments # if SrcLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT): # # Last line is comments, and this line go on. # if StillCommentFalg: HeaderComments.append(Line) SectionContent += SrcLineContent + DT.END_OF_LINE continue # # First time encounter comment # else: # # Clear original data # HeaderComments = [] HeaderComments.append(Line) StillCommentFalg = True SectionContent += SrcLineContent + DT.END_OF_LINE continue else: StillCommentFalg = False if len(HeaderComments) >= 1: LineComment = InfLineCommentObject() <|fim▁hole|> # # Find Tail comment. # if SrcLineContent.find(DT.TAB_COMMENT_SPLIT) > -1: TailComments = SrcLineContent[SrcLineContent.find(DT.TAB_COMMENT_SPLIT):] SrcLineContent = SrcLineContent[:SrcLineContent.find(DT.TAB_COMMENT_SPLIT)] if LineComment is None: LineComment = InfLineCommentObject() LineComment.SetTailComments(TailComments) # # Find Macro # Name, Value = MacroParser((SrcLineContent, SrcLineNo), FileName, DT.MODEL_EFI_SOURCE_FILE, self.FileLocalMacros) if Name is not None: SectionMacros[Name] = Value LineComment = None HeaderComments = [] continue # # Replace with Local section Macro and [Defines] section Macro. # SrcLineContent = InfExpandMacro(SrcLineContent, (FileName, SrcLineContent, SrcLineNo), self.FileLocalMacros, SectionMacros) TokenList = GetSplitValueList(SrcLineContent, DT.TAB_VALUE_SPLIT, 4) ValueList[0:len(TokenList)] = TokenList # # Store section content string after MACRO replaced. 
# SectionContent += SrcLineContent + DT.END_OF_LINE SourceList.append((ValueList, LineComment, (SrcLineContent, SrcLineNo, FileName))) ValueList = [] LineComment = None TailComments = '' HeaderComments = [] continue # # Current section archs # ArchList = [] for Item in self.LastSectionHeaderContent: if Item[1] not in ArchList: ArchList.append(Item[1]) InfSectionObject.SetSupArchList(Item[1]) InfSectionObject.SetAllContent(SectionContent) if not InfSectionObject.SetSources(SourceList, Arch = ArchList): Logger.Error('InfParser', FORMAT_INVALID, ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Sources]"), File=FileName, Line=Item[3])<|fim▁end|>
LineCommentContent = '' for Item in HeaderComments: LineCommentContent += Item[0] + DT.END_OF_LINE LineComment.SetHeaderComments(LineCommentContent)
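With this completion placed into the hole of the preceding `InfSourceSectionParser.py` prompt, the header-comment handling in the not-a-comment branch of the [Sources] parser reads as below (an excerpt of the spliced result; indentation is assumed, and the `StillCommentFalg` spelling follows the source):

```python
StillCommentFalg = False
if len(HeaderComments) >= 1:
    LineComment = InfLineCommentObject()
    # --- completion spliced in ---
    LineCommentContent = ''
    for Item in HeaderComments:
        LineCommentContent += Item[0] + DT.END_OF_LINE
    LineComment.SetHeaderComments(LineCommentContent)
```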
<|file_name|>operations.py<|end_file_name|><|fim▁begin|># This file is part of Indico. # Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN). # # Indico is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 3 of the # License, or (at your option) any later version. # # Indico is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Indico; if not, see <http://www.gnu.org/licenses/>. from __future__ import unicode_literals from datetime import timedelta from flask import session from indico.core import signals from indico.core.db import db from indico.core.db.sqlalchemy.util.session import no_autoflush from indico.modules.events.contributions import logger from indico.modules.events.contributions.models.contributions import Contribution from indico.modules.events.contributions.models.persons import ContributionPersonLink from indico.modules.events.contributions.models.subcontributions import SubContribution from indico.modules.events.logs.models.entries import EventLogKind, EventLogRealm from indico.modules.events.timetable.operations import (delete_timetable_entry, schedule_contribution, update_timetable_entry) from indico.modules.events.util import set_custom_fields def _ensure_consistency(contrib): """Unschedule contribution if not consistent with timetable A contribution that has no session assigned, may not be scheduled inside a session. A contribution that has a session assigned may only be scheduled inside a session block associated with that session, and that session block must match the session block of the contribution. :return: A bool indicating whether the contribution has been unscheduled to preserve consistency. 
""" entry = contrib.timetable_entry if entry is None: return False if entry.parent_id is None and (contrib.session is not None or contrib.session_block is not None): # Top-level entry but we have a session/block set delete_timetable_entry(entry, log=False) return True elif entry.parent_id is not None: parent = entry.parent # Nested entry but no or a different session/block set if parent.session_block.session != contrib.session or parent.session_block != contrib.session_block: delete_timetable_entry(entry, log=False) return True return False def create_contribution(event, contrib_data, custom_fields_data=None, session_block=None, extend_parent=False): start_dt = contrib_data.pop('start_dt', None) contrib = Contribution(event=event) contrib.populate_from_dict(contrib_data) if start_dt is not None: schedule_contribution(contrib, start_dt=start_dt, session_block=session_block, extend_parent=extend_parent) if custom_fields_data: set_custom_fields(contrib, custom_fields_data) db.session.flush() signals.event.contribution_created.send(contrib) logger.info('Contribution %s created by %s', contrib, session.user) contrib.event.log(EventLogRealm.management, EventLogKind.positive, 'Contributions', 'Contribution "{}" has been created'.format(contrib.title), session.user) return contrib @no_autoflush def update_contribution(contrib, contrib_data, custom_fields_data=None): """Update a contribution :param contrib: The `Contribution` to update :param contrib_data: A dict containing the data to update :param custom_fields_data: A dict containing the data for custom fields. :return: A dictionary containing information related to the update. `unscheduled` will be true if the modification resulted in the contribution being unscheduled. In this<|fim▁hole|> """ rv = {'unscheduled': False, 'undo_unschedule': None} current_session_block = contrib.session_block start_dt = contrib_data.pop('start_dt', None) if start_dt is not None: update_timetable_entry(contrib.timetable_entry, {'start_dt': start_dt}) changes = contrib.populate_from_dict(contrib_data) if custom_fields_data: changes.update(set_custom_fields(contrib, custom_fields_data)) if 'session' in contrib_data: timetable_entry = contrib.timetable_entry if timetable_entry is not None and _ensure_consistency(contrib): rv['unscheduled'] = True rv['undo_unschedule'] = {'start_dt': timetable_entry.start_dt.isoformat(), 'contribution_id': contrib.id, 'session_block_id': current_session_block.id if current_session_block else None, 'force': True} db.session.flush() if changes: signals.event.contribution_updated.send(contrib, changes=changes) logger.info('Contribution %s updated by %s', contrib, session.user) contrib.event.log(EventLogRealm.management, EventLogKind.change, 'Contributions', 'Contribution "{}" has been updated'.format(contrib.title), session.user) return rv def delete_contribution(contrib): contrib.is_deleted = True if contrib.timetable_entry is not None: delete_timetable_entry(contrib.timetable_entry, log=False) db.session.flush() signals.event.contribution_deleted.send(contrib) logger.info('Contribution %s deleted by %s', contrib, session.user) contrib.event.log(EventLogRealm.management, EventLogKind.negative, 'Contributions', 'Contribution "{}" has been deleted'.format(contrib.title), session.user) def create_subcontribution(contrib, data): subcontrib = SubContribution() subcontrib.populate_from_dict(data) contrib.subcontributions.append(subcontrib) db.session.flush() signals.event.subcontribution_created.send(subcontrib) logger.info('Subcontribution 
%s created by %s', subcontrib, session.user) subcontrib.event.log(EventLogRealm.management, EventLogKind.positive, 'Subcontributions', 'Subcontribution "{}" has been created'.format(subcontrib.title), session.user) return subcontrib def update_subcontribution(subcontrib, data): subcontrib.populate_from_dict(data) db.session.flush() signals.event.subcontribution_updated.send(subcontrib) logger.info('Subcontribution %s updated by %s', subcontrib, session.user) subcontrib.event.log(EventLogRealm.management, EventLogKind.change, 'Subcontributions', 'Subcontribution "{}" has been updated'.format(subcontrib.title), session.user) def delete_subcontribution(subcontrib): subcontrib.is_deleted = True db.session.flush() signals.event.subcontribution_deleted.send(subcontrib) logger.info('Subcontribution %s deleted by %s', subcontrib, session.user) subcontrib.event.log(EventLogRealm.management, EventLogKind.negative, 'Subcontributions', 'Subcontribution "{}" has been deleted'.format(subcontrib.title), session.user) @no_autoflush def create_contribution_from_abstract(abstract, contrib_session=None): event = abstract.event contrib_person_links = set() person_link_attrs = {'_title', 'address', 'affiliation', 'first_name', 'last_name', 'phone', 'author_type', 'is_speaker', 'display_order'} for abstract_person_link in abstract.person_links: link = ContributionPersonLink(person=abstract_person_link.person) link.populate_from_attrs(abstract_person_link, person_link_attrs) contrib_person_links.add(link) duration = contrib_session.default_contribution_duration if contrib_session else timedelta(minutes=15) custom_fields_data = {'custom_{}'.format(field_value.contribution_field.id): field_value.data for field_value in abstract.field_values} return create_contribution(event, {'friendly_id': abstract.friendly_id, 'title': abstract.title, 'duration': duration, 'description': abstract.description, 'type': abstract.accepted_contrib_type, 'track': abstract.accepted_track, 'session': contrib_session, 'person_link_data': {link: True for link in contrib_person_links}}, custom_fields_data=custom_fields_data)<|fim▁end|>
case `undo_unschedule` contains the necessary data to re-schedule it (undoing the session change causing it to be unscheduled)
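Dropped into the hole, this completion finishes the `:return:` note in the `update_contribution` docstring of the Indico `operations.py` record above:

```
:return: A dictionary containing information related to the update.
         `unscheduled` will be true if the modification resulted in the
         contribution being unscheduled. In this case `undo_unschedule`
         contains the necessary data to re-schedule it (undoing the
         session change causing it to be unscheduled)
"""
```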
<|file_name|>TreeNodeProject.java<|end_file_name|><|fim▁begin|>/* * Orbit, a versatile image analysis software for biological image-based quantification. * Copyright (C) 2009 - 2017 Idorsia Pharmaceuticals Ltd., Hegenheimermattweg 91, CH-4123 Allschwil, Switzerland. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package com.actelion.research.orbit.imageprovider.tree; import com.actelion.research.orbit.beans.RawData; import com.actelion.research.orbit.imageprovider.ImageProviderOmero; import com.actelion.research.orbit.utils.Logger; import java.util.ArrayList; import java.util.List; public class TreeNodeProject extends AbstractOrbitTreeNode { private static Logger logger = Logger.getLogger(TreeNodeProject.class); private RawData project = null; private ImageProviderOmero imageProviderOmero; public TreeNodeProject(ImageProviderOmero imageProviderOmero, RawData project) { this.imageProviderOmero = imageProviderOmero; this.project = project; }<|fim▁hole|> List<TreeNodeProject> nodeList = new ArrayList<>(); int group = -1; if (parent!=null && parent instanceof TreeNodeGroup) { TreeNodeGroup groupNode = (TreeNodeGroup) parent; RawData rdGroup = (RawData) groupNode.getIdentifier(); group = rdGroup.getRawDataId(); } List<RawData> rdList = loadProjects(group); for (RawData rd : rdList) { nodeList.add(new TreeNodeProject(imageProviderOmero, rd)); } return nodeList; } @Override public boolean isChildOf(Object parent) { return parent instanceof TreeNodeGroup; } @Override public Object getIdentifier() { return project; } @Override public String toString() { return project != null ? project.getBioLabJournal() : ""; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TreeNodeProject that = (TreeNodeProject) o; return project != null ? project.equals(that.project) : that.project == null; } @Override public int hashCode() { return project != null ? project.hashCode() : 0; } private List<RawData> loadProjects(int group) { return imageProviderOmero.loadProjects(group); } }<|fim▁end|>
@Override public synchronized List<TreeNodeProject> getNodes(AbstractOrbitTreeNode parent) {
<|file_name|>ClassTemplateBuilder.java<|end_file_name|><|fim▁begin|>package twg2.template.codeTemplate; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * @param <T> the {@link ClassInfo} to build * * @author TeamworkGuy2 * @since 2015-1-24 */ public final class ClassTemplateBuilder<T extends ClassInfo> { private T tmpl; public ClassTemplateBuilder(T tmpl) { this.tmpl = tmpl; } public ClassTemplateBuilder(T tmpl, String className, String packageName) { this.tmpl = tmpl; tmpl.setClassName(className); tmpl.setPackageName(packageName); } public ClassTemplateBuilder<T> setPackageName(String packageName) { tmpl.setPackageName(packageName); return this; } /** Alias for {@link #addImportStatement(Class...)} */ @SafeVarargs public final ClassTemplateBuilder<T> imports(Class<?>... importStatements) { return addImportStatement(importStatements); } @SafeVarargs public final ClassTemplateBuilder<T> addImportStatement(Class<?>... importStatements) { List<String> statements = tmpl.getImportStatements(); if(statements == null) { statements = new ArrayList<>(); tmpl.setImportStatements(statements); } if(importStatements == null) { return this; } for(Class<?> importStatement : importStatements) { statements.add(importStatement.getCanonicalName()); } return this; } /** Alias for {@link #addImportStatement(String...)} */ @SafeVarargs public final ClassTemplateBuilder<T> imports(String... importStatements) { return addImportStatement(importStatements); } @SafeVarargs public final ClassTemplateBuilder<T> addImportStatement(String... importStatements) { List<String> imports = tmpl.getImportStatements(); if(imports == null) { imports = new ArrayList<>(); tmpl.setImportStatements(imports); } if(importStatements == null) { return this; } for(String importStatement : importStatements) { imports.add(importStatement); } return this; } public ClassTemplateBuilder<T> setClassModifier(String classAccessModifier) { tmpl.setClassModifier(classAccessModifier); return this; } public ClassTemplateBuilder<T> setClassType(String classType) { tmpl.setClassType(classType); return this; } public ClassTemplateBuilder<T> addTypeParameters(Iterable<? 
extends Map.Entry<String, String>> classParameterNamesAndTypes) { for(Map.Entry<String, String> classParam : classParameterNamesAndTypes) { addTypeParameter(classParam.getKey(), classParam.getValue()); } return this; } <|fim▁hole|> public ClassTemplateBuilder<T> addTypeParameter(String classParameterName, String classTypeParameterDefinition) { if(tmpl.getClassTypeParameterDefinitions() == null) { tmpl.setClassTypeParameterDefinitions(new ArrayList<>()); } if(tmpl.getClassTypeParameterNames() == null) { tmpl.setClassTypeParameterNames(new ArrayList<>()); } tmpl.getClassTypeParameterNames().add(classParameterName); tmpl.getClassTypeParameterDefinitions().add(classTypeParameterDefinition); return this; } public ClassTemplateBuilder<T> setClassName(String className) { tmpl.setClassName(className); return this; } /** Alias for {@link #setExtendClassName(Class)} */ public ClassTemplateBuilder<T> extend(Class<?> extendClassName) { return setExtendClassName(extendClassName); } public ClassTemplateBuilder<T> setExtendClassName(Class<?> extendClassName) { tmpl.setExtendClassName(extendClassName.getCanonicalName().replace("java.lang.", "")); return this; } /** Alias for {@link #setExtendClassName(String)} */ public ClassTemplateBuilder<T> extend(String extendClassName) { return setExtendClassName(extendClassName); } public ClassTemplateBuilder<T> setExtendClassName(String extendClassName) { tmpl.setExtendClassName(extendClassName); return this; } /** Alias for {@link #addImplementClassNames(String...)} */ @SafeVarargs public final ClassTemplateBuilder<T> implement(String... implementClassNames) { return addImplementClassNames(implementClassNames); } @SafeVarargs public final ClassTemplateBuilder<T> addImplementClassNames(String... implementClassNames) { List<String> implementNames = tmpl.getImplementClassNames(); if(implementNames == null) { implementNames = new ArrayList<>(); tmpl.setImplementClassNames(implementNames); } if(implementClassNames == null) { return this; } for(String implementClassName : implementClassNames) { implementNames.add(implementClassName); } return this; } /** Alias for {@link #addImplementClassNames(Class...)} */ @SafeVarargs public final ClassTemplateBuilder<T> implement(Class<?>... implementClassNames) { return addImplementClassNames(implementClassNames); } @SafeVarargs public final ClassTemplateBuilder<T> addImplementClassNames(Class<?>... implementClassNames) { List<String> implementNames = tmpl.getImplementClassNames(); if(implementNames == null) { implementNames = new ArrayList<>(); tmpl.setImplementClassNames(implementNames); } if(implementClassNames == null) { return this; } for(Class<?> implementClassName : implementClassNames) { implementNames.add(implementClassName.getCanonicalName().replace("java.lang.", "")); } return this; } public T getTemplate() { return tmpl; } public static ClassTemplateBuilder<ClassInfo> newInst() { return new ClassTemplateBuilder<ClassInfo>(new ClassTemplate()); } public static <T extends ClassInfo> ClassTemplateBuilder<T> of(T inst) { return new ClassTemplateBuilder<>(inst); } public static <T extends ClassInfo> ClassTemplateBuilder<T> of(T inst, String className, String packageName) { return new ClassTemplateBuilder<>(inst, className, packageName); } public static ClassTemplateBuilder<ClassTemplate> of(String className) { return of(className, null); } public static ClassTemplateBuilder<ClassTemplate> of(String className, String packageName) { return new ClassTemplateBuilder<>(new ClassTemplate(), className, packageName); } }<|fim▁end|>
<|file_name|>discount.py<|end_file_name|><|fim▁begin|><|fim▁hole|># - Node (administer inline with nodes) # - Bulk amounts on nodes # - User # - Group of users # - Order (this is more-or-less a voucher) # - Shipping costs # Possible amounts: # - Percentage # - Fixed amount # Flag indicating if a discount can be combined with other discounts. # Boolean "offer" to include in list of offers. Default to true if discount is at node level. # Save all applied discounts when ordering in a ManyToMany relationship with Order.<|fim▁end|>
# Possible discounts:
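In the `discount.py` record the hole sits immediately after `<|fim▁begin|>`, so this completion is simply the file's opening line; rejoined with the suffix, the file starts:

```python
# Possible discounts:
# - Node (administer inline with nodes)
# - Bulk amounts on nodes
# - User
# - Group of users
# - Order (this is more-or-less a voucher)
# - Shipping costs
```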
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from django.db import models from django.core.paginator import Paginator, PageNotAnInteger from wagtail.wagtailcore.models import Page from wagtail.wagtailcore.fields import RichTextField from wagtail.wagtailadmin.edit_handlers import FieldPanel from wagtail.wagtailimages.edit_handlers import ImageChooserPanel from wagtail.wagtailsearch import index class EventPage(Page): date = models.DateField("Event Date", blank=True) time = models.TimeField("Time", blank=True) location = models.CharField(max_length=250, blank=True) address = models.CharField(max_length=250, blank=True) intro = models.CharField(max_length=250, blank=True) body = RichTextField(blank=True) main_image = models.ForeignKey( 'wagtailimages.Image', null=True, blank=True, on_delete=models.SET_NULL, related_name='+' ) search_fields = Page.search_fields + ( index.SearchField('intro'), index.SearchField('body'), ) content_panels = Page.content_panels + [ FieldPanel('date'), FieldPanel('time'), ImageChooserPanel('main_image'), FieldPanel('location'), FieldPanel('address'), FieldPanel('intro'), FieldPanel('body', classname="full"), ] <|fim▁hole|> intro = RichTextField(blank=True) def get_context(self, request): context = super(EventIndexPage, self).get_context(request) context['event_entries'] = EventPage.objects.child_of(self).live() return context content_panels = Page.content_panels + [ FieldPanel('intro'), ]<|fim▁end|>
class EventIndexPage(Page):
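Spliced into the hole of the Wagtail `models.py` prompt, this completion opens the second page model, whose body the suffix then supplies (indentation assumed):

```python
class EventIndexPage(Page):
    intro = RichTextField(blank=True)

    def get_context(self, request):
        context = super(EventIndexPage, self).get_context(request)
        context['event_entries'] = EventPage.objects.child_of(self).live()
        return context

    content_panels = Page.content_panels + [
        FieldPanel('intro'),
    ]
```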
<|file_name|>handjoob.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright(C) 2013 Bezleputh # # This file is part of weboob. # # weboob is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # weboob is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with weboob. If not, see <http://www.gnu.org/licenses/>. import sys from weboob.capabilities.job import ICapJob from weboob.tools.application.repl import ReplApplication, defaultcount from weboob.tools.application.formatters.iformatter import IFormatter, PrettyFormatter __all__ = ['Handjoob'] class JobAdvertFormatter(IFormatter): MANDATORY_FIELDS = ('id', 'url', 'publication_date', 'title') def format_obj(self, obj, alias): result = u'%s%s%s\n' % (self.BOLD, obj.title, self.NC) result += 'url: %s\n' % obj.url if hasattr(obj, 'publication_date') and obj.publication_date: result += 'Publication date : %s\n' % obj.publication_date.strftime('%Y-%m-%d') if hasattr(obj, 'place') and obj.place: result += 'Location: %s\n' % obj.place if hasattr(obj, 'society_name') and obj.society_name: result += 'Society : %s\n' % obj.society_name if hasattr(obj, 'job_name') and obj.job_name: result += 'Job name : %s\n' % obj.job_name if hasattr(obj, 'contract_type') and obj.contract_type: result += 'Contract : %s\n' % obj.contract_type if hasattr(obj, 'pay') and obj.pay: result += 'Pay : %s\n' % obj.pay if hasattr(obj, 'formation') and obj.formation: result += 'Formation : %s\n' % obj.formation if hasattr(obj, 'experience') and obj.experience: result += 'Experience : %s\n' % obj.experience if hasattr(obj, 'description') and obj.description: result += 'Description : %s\n' % obj.description return result class JobAdvertListFormatter(PrettyFormatter): MANDATORY_FIELDS = ('id', 'title') def get_title(self, obj): return '%s' % (obj.title) def get_description(self, obj): result = u'' if hasattr(obj, 'publication_date') and obj.publication_date: result += '\tPublication date : %s\n' % obj.publication_date.strftime('%Y-%m-%d') if hasattr(obj, 'place') and obj.place: result += '\tLocation: %s\n' % obj.place if hasattr(obj, 'society_name') and obj.society_name: result += '\tSociety : %s\n' % obj.society_name if hasattr(obj, 'contract_type') and obj.contract_type: result += '\tContract : %s\n' % obj.contract_type return result.strip('\n\t') class Handjoob(ReplApplication): APPNAME = 'handjoob' VERSION = '0.i' COPYRIGHT = 'Copyright(C) 2012 Bezleputh' DESCRIPTION = "Console application to search for a job." SHORT_DESCRIPTION = "search for a job" CAPS = ICapJob EXTRA_FORMATTERS = {'job_advert_list': JobAdvertListFormatter, 'job_advert': JobAdvertFormatter, } COMMANDS_FORMATTERS = {'search': 'job_advert_list', 'ls': 'job_advert_list', 'info': 'job_advert', } @defaultcount(10) def do_search(self, pattern): """ search PATTERN Search for an advert matching a PATTERN. 
""" self.change_path([u'search']) self.start_format(pattern=pattern) for backend, job_advert in self.do('search_job', pattern): self.cached_format(job_advert)<|fim▁hole|> @defaultcount(10) def do_ls(self, line): """ advanced search Search for an advert matching to advanced filters. """ self.change_path([u'advanced']) for backend, job_advert in self.do('advanced_search_job'): self.cached_format(job_advert) def complete_info(self, text, line, *ignored): args = line.split(' ') if len(args) == 2: return self._complete_object() def do_info(self, _id): """ info ID Get information about an advert. """ if not _id: print >>sys.stderr, 'This command takes an argument: %s' % self.get_command_help('info', short=True) return 2 job_advert = self.get_object(_id, 'get_job_advert') if not job_advert: print >>sys.stderr, 'Job advert not found: %s' % _id return 3 self.start_format() self.format(job_advert)<|fim▁end|>
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main import ( "api/handlers" "fmt" "net/http" "runtime" "time" ) func init() { runtime.GOMAXPROCS(runtime.NumCPU()) } func main() { fmt.Println("Server is start at ", time.Now().String(), " , on port 8080") http.HandleFunc("/useage", handlers.Useage)<|fim▁hole|> http.HandleFunc("/v1/", handlers.API_V1) http.ListenAndServe(":8080", nil) }<|fim▁end|>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python from django.http import HttpResponse from django.template import RequestContext, loader from django.views.decorators.csrf import csrf_exempt import django.shortcuts<|fim▁hole|>#----------------------------------------------------------------------------- from wlokalu.logging import getLogger, message as log logger = getLogger(__name__) #----------------------------------------------------------------------------- @csrf_exempt def list(request, nick = None): template = loader.get_template("list.html") from django.core.urlresolvers import reverse from forms import PresenceForm form = PresenceForm() if nick is not None: form.initial['nick'] = nick form_target = reverse(list, kwargs = {'nick': nick}) else: form_target = reverse(list) if request.POST.get('nick', '') != '': context = { 'address': request.META['REMOTE_ADDR'], 'uri': request.META['REQUEST_URI'], } if 'enter' in request.POST: presence.person_entered(request.POST['nick'], context) else: # 'leave' in request.POST presence.person_left(request.POST['nick'], context) # tell the browser to reload the page, but with GET request return django.shortcuts.redirect(request.path) context = RequestContext(request, { 'form_target': form_target, 'form': form, 'present': presence.list_people(), 'sensors': presence.list_simple_sensors(), 'complex_sensors': presence.list_complex_sensors(), }) return HttpResponse(template.render(context)) #----------------------------------------------------------------------------- # vim:ft=python:foldmethod=marker<|fim▁end|>
from wlokalu.api import presence
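Here the completion is the single import the hole removed from the header of the `views.py` record; with it back in place, the module's imports read:

```python
from django.views.decorators.csrf import csrf_exempt
import django.shortcuts

from wlokalu.api import presence   # completion spliced into the hole

# ---------------------------------------------------------------------------
from wlokalu.logging import getLogger, message as log
logger = getLogger(__name__)
```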
<|file_name|>train.py<|end_file_name|><|fim▁begin|>from __future__ import print_function import argparse from collections import OrderedDict import json import os import logging from keras.callbacks import EarlyStopping from sklearn.preprocessing import normalize from sklearn.metrics import roc_curve, auc, roc_auc_score, precision_score, recall_score, f1_score, accuracy_score, average_precision_score from scipy.sparse import csr_matrix from keras.utils.io_utils import HDF5Matrix #from keras.utils.visualize_util import plot from keras.optimizers import SGD, Adam from sklearn.metrics import r2_score import numpy as np import theano.tensor as tt import pandas as pd import random import common import models from predict import obtain_predictions from eval import do_eval import h5py class Config(object): """Configuration for the training process.""" def __init__(self, params, normalize=False, whiten=True): self.model_id = common.get_next_model_id() self.norm = normalize self.whiten = whiten self.x_path = '%s_%sx%s' % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window']) self.y_path = '%s_%s_%s' % (params['dataset']['fact'],params['dataset']['dim'],params['dataset']['dataset']) self.dataset_settings = params['dataset'] self.training_params = params['training'] self.model_arch = params['cnn'] self.predicting_params = params['predicting'] def get_dict(self): object_dict = self.__dict__ first_key = "model_id" conf_dict = OrderedDict({first_key: object_dict[first_key]}) conf_dict.update(object_dict) return conf_dict def _squared_magnitude(x): return tt.sqr(x).sum(axis=-1) def _magnitude(x): return tt.sqrt(tt.maximum(_squared_magnitude(x), np.finfo(x.dtype).tiny)) def cosine(x, y): return tt.clip((1 - (x * y).sum(axis=-1) / (_magnitude(x) * _magnitude(y))) / 2, 0, 1) def load_sparse_csr(filename): loader = np.load(filename) return csr_matrix(( loader['data'], loader['indices'], loader['indptr']), shape = loader['shape']) def build_model(config): """Builds the cnn.""" params = config.model_arch get_model = getattr(models, 'get_model_'+str(params['architecture'])) model = get_model(params) #model = model_kenun.build_convnet_model(params) # Learning setup t_params = config.training_params sgd = SGD(lr=t_params["learning_rate"], decay=t_params["decay"], momentum=t_params["momentum"], nesterov=t_params["nesterov"]) adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08) optimizer = eval(t_params['optimizer']) metrics = ['mean_squared_error'] if config.model_arch["final_activation"] == 'softmax': metrics.append('categorical_accuracy') if t_params['loss_func'] == 'cosine': loss_func = eval(t_params['loss_func']) else: loss_func = t_params['loss_func'] model.compile(loss=loss_func, optimizer=optimizer,metrics=metrics) return model def load_data_preprocesed(params, X_path, Y_path, dataset, val_percent, test_percent, n_samples, with_metadata=False, only_metadata=False, metadata_source='rovi'): factors = np.load(common.DATASETS_DIR+'/y_train_'+Y_path+'.npy') # OJO remove S index_factors = open(common.DATASETS_DIR+'/items_index_train_'+dataset+'.tsv').read().splitlines() if not only_metadata: all_X = np.load(common.TRAINDATA_DIR+'/X_train_'+X_path+'.npy') index_train = open(common.TRAINDATA_DIR+'/index_train_%s.tsv' % (X_path)).read().splitlines() all_Y = np.zeros((len(index_train),factors.shape[1])) index_factors_inv = dict() for i,item in enumerate(index_factors): index_factors_inv[item] = i for i,item in enumerate(index_train): all_Y[i,:] = 
factors[index_factors_inv[item]] else: all_Y = factors if with_metadata: if 'w2v' in metadata_source: all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset))[:,:int(params['cnn']['sequence_length'])] elif 'model' in metadata_source or not params['dataset']['sparse']: all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset)) else: all_X_meta = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (metadata_source,dataset)).todense() all_X_in_meta = all_X = all_X_meta print(all_X.shape) print(all_Y.shape) if n_samples != 'all': n_samples = int(n_samples) all_X = all_X[:n_samples] all_Y = all_Y[:n_samples] if with_metadata: all_X_in_meta = all_X_in_meta[:n_samples] if params['training']['normalize_y'] == True: normalize(all_Y,copy=False) if params['training']["val_from_file"]: Y_val = np.load(common.DATASETS_DIR+'/y_val_'+Y_path+'.npy') Y_test = np.load(common.DATASETS_DIR+'/y_test_'+Y_path+'.npy') #!!! OJO remove S from trainS if params['dataset']['sparse']: X_val = load_sparse_csr(common.TRAINDATA_DIR+'/X_val_%s_%s.npz' % (metadata_source,dataset)).todense() X_test = load_sparse_csr(common.TRAINDATA_DIR+'/X_test_%s_%s.npz' % (metadata_source,dataset)).todense() else: X_val = np.load(common.TRAINDATA_DIR+'/X_val_%s_%s.npy' % (metadata_source,dataset)) X_test = np.load(common.TRAINDATA_DIR+'/X_test_%s_%s.npy' % (metadata_source,dataset)) X_train = all_X Y_train = all_Y else: N = all_Y.shape[0] train_percent = 1 - val_percent - test_percent N_train = int(train_percent * N) N_val = int(val_percent * N) logging.debug("Training data points: %d" % N_train) logging.debug("Validation data points: %d" % N_val) logging.debug("Test data points: %d" % (N - N_train - N_val)) if not only_metadata: # Slice data X_train = all_X[:N_train] X_val = all_X[N_train:N_train + N_val] X_test = all_X[N_train + N_val:] Y_train = all_Y[:N_train] Y_val = all_Y[N_train:N_train + N_val] Y_test = all_Y[N_train + N_val:] if with_metadata: if only_metadata: X_train = all_X_in_meta[:N_train] X_val = all_X_in_meta[N_train:N_train + N_val] X_test = all_X_in_meta[N_train + N_val:] else: X_train = [X_train,all_X_in_meta[:N_train]] X_val = [X_val,all_X_in_meta[N_train:N_train + N_val]] X_test = [X_test,all_X_in_meta[N_train + N_val:]] return X_train, Y_train, X_val, Y_val, X_test, Y_test def load_data_hf5(params,val_percent, test_percent): hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%s.hdf5" % (params['dataset']['dataset'],params['dataset']['window']) f = h5py.File(hdf5_file,"r") N = f["targets"].shape[0] f.close() train_percent = 1 - val_percent - test_percent N_train = int(train_percent * N) N_val = int(val_percent * N) X_train = HDF5Matrix(hdf5_file, 'features', start=0, end=N_train) Y_train = HDF5Matrix(hdf5_file, 'targets', start=0, end=N_train) X_val = HDF5Matrix(hdf5_file, 'features', start=N_train, end=N_train+N_val) Y_val = HDF5Matrix(hdf5_file, 'targets', start=N_train, end=N_train+N_val) X_test = HDF5Matrix(hdf5_file, 'features', start=N_train+N_val, end=N) Y_test = HDF5Matrix(hdf5_file, 'targets', start=N_train+N_val, end=N) return X_train, Y_train, X_val, Y_val, X_test, Y_test, N_train def load_data_hf5_memory(params,val_percent, test_percent, y_path, id2gt, X_meta = None, val_from_file = False): if val_from_file: hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window']) f = h5py.File(hdf5_file,"r") index_train = f["index"][:] index_train = 
np.delete(index_train, np.where(index_train == "")) N_train = index_train.shape[0] val_hdf5_file = common.PATCHES_DIR+"/patches_val_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window']) f_val = h5py.File(val_hdf5_file,"r") X_val = f_val['features'][:] #Y_val = f_val['targets'][:] factors_val = np.load(common.DATASETS_DIR+'/y_val_'+y_path+'.npy') index_factors_val = open(common.DATASETS_DIR+'/items_index_val_'+params['dataset']['dataset']+'.tsv').read().splitlines() id2gt_val = dict((index,factor) for (index,factor) in zip(index_factors_val,factors_val)) index_val = [i for i in f_val['index'][:] if i in id2gt_val] X_val = np.delete(X_val, np.where(index_val == ""), axis=0) index_val = np.delete(index_val, np.where(index_val == "")) Y_val = np.asarray([id2gt_val[id] for id in index_val]) test_hdf5_file = common.PATCHES_DIR+"/patches_test_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window']) f_test = h5py.File(test_hdf5_file,"r") X_test = f_test['features'][:] #Y_test = f_test['targets'][:] factors_test = np.load(common.DATASETS_DIR+'/y_test_'+y_path+'.npy') index_factors_test = open(common.DATASETS_DIR+'/items_index_test_'+params['dataset']['dataset']+'.tsv').read().splitlines() id2gt_test = dict((index,factor) for (index,factor) in zip(index_factors_test,factors_test)) index_test = [i for i in f_test['index'][:] if i in id2gt_test] X_test = np.delete(X_test, np.where(index_test == ""), axis=0) index_test = np.delete(index_test, np.where(index_test == "")) Y_test = np.asarray([id2gt_test[id] for id in index_test]) else: hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window']) f = h5py.File(hdf5_file,"r") index_all = f["index"][:] N = index_all.shape[0] train_percent = 1 - val_percent - test_percent N_train = int(train_percent * N) N_val = int(val_percent * N) X_val = f['features'][N_train:N_train+N_val] index_val = f['index'][N_train:N_train+N_val] X_val = np.delete(X_val, np.where(index_val == ""), axis=0) index_val = np.delete(index_val, np.where(index_val == "")) Y_val = np.asarray([id2gt[id] for id in index_val]) X_test = f['features'][N_train+N_val:N] index_test = f['index'][N_train+N_val:N] print(index_test.shape) print(X_test.shape) X_test = np.delete(X_test, np.where(index_test == ""), axis=0) index_test = np.delete(index_test, np.where(index_test == "")) print(index_test.shape) print(X_test.shape) Y_test = np.asarray([id2gt[id] for id in index_test]) print(Y_test.shape) index_train = f['index'][:N_train] index_train = np.delete(index_train, np.where(index_train == "")) N_train = index_train.shape[0] if X_meta != None: X_val = [X_val,X_meta[N_train:N_train+N_val]] X_test = [X_test,X_meta[N_train+N_val:N]] return X_val, Y_val, X_test, Y_test, N_train def batch_block_generator(params, y_path, N_train, id2gt, X_meta=None, val_from_file=False): hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window']) f = h5py.File(hdf5_file,"r") block_step = 50000 batch_size = params['training']['n_minibatch'] randomize = True with_meta = False if X_meta != None: with_meta = True while 1: for i in range(0, N_train, block_step): x_block = f['features'][i:min(N_train, i+block_step)] index_block = f['index'][i:min(N_train, i+block_step)] #y_block = f['targets'][i:min(N_train,i+block_step)] x_block = 
np.delete(x_block, np.where(index_block == ""), axis=0) index_block = np.delete(index_block, np.where(index_block == "")) y_block = np.asarray([id2gt[id] for id in index_block]) if params['training']['normalize_y']: normalize(y_block, copy=False) items_list = range(x_block.shape[0]) if randomize: random.shuffle(items_list) for j in range(0, len(items_list), batch_size): if j+batch_size <= x_block.shape[0]: items_in_batch = items_list[j:j+batch_size] x_batch = x_block[items_in_batch] y_batch = y_block[items_in_batch] if with_meta: x_batch = [x_batch, X_meta[items_in_batch]] yield (x_batch, y_batch) def process(params,with_predict=True,with_eval=True): logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG) params['cnn']['n_out'] = int(params['dataset']['dim']) #params['cnn']['n_frames'] = int(params['dataset']['window'] * SR / float(HR)) with_metadata = params['dataset']['with_metadata'] only_metadata = params['dataset']['only_metadata'] metadata_source = params['dataset']['meta-suffix'] if with_metadata: if 'w2v' in metadata_source: X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,params['dataset']['dataset']))[:,:int(params['cnn']['sequence_length'])] params['cnn']['n_metafeatures'] = len(X_meta[0]) if 'meta-suffix2' in params['dataset']: X_meta2 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix2'],params['dataset']['dataset'])) params['cnn']['n_metafeatures2'] = len(X_meta2[0]) if 'meta-suffix3' in params['dataset']: X_meta3 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix3'],params['dataset']['dataset'])) params['cnn']['n_metafeatures3'] = len(X_meta3[0]) if 'meta-suffix4' in params['dataset']: X_meta4 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix4'],params['dataset']['dataset'])) params['cnn']['n_metafeatures4'] = len(X_meta4[0]) elif 'model' in metadata_source or not params['dataset']['sparse']: X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,params['dataset']['dataset'])) params['cnn']['n_metafeatures'] = len(X_meta[0]) if 'meta-suffix2' in params['dataset']: X_meta2 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix2'],params['dataset']['dataset'])) params['cnn']['n_metafeatures2'] = len(X_meta2[0]) if 'meta-suffix3' in params['dataset']: X_meta3 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix3'],params['dataset']['dataset'])) params['cnn']['n_metafeatures3'] = len(X_meta3[0]) if 'meta-suffix4' in params['dataset']: X_meta4 = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (params['dataset']['meta-suffix4'],params['dataset']['dataset'])) params['cnn']['n_metafeatures4'] = len(X_meta4[0]) else: X_meta = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (metadata_source,params['dataset']['dataset'])).todense() params['cnn']['n_metafeatures'] = X_meta.shape[1] if 'meta-suffix2' in params['dataset']: X_meta2 = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (params['dataset']['meta-suffix2'],params['dataset']['dataset'])) params['cnn']['n_metafeatures2'] = X_meta2.shape[1] if 'meta-suffix3' in params['dataset']: X_meta3 = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (params['dataset']['meta-suffix3'],params['dataset']['dataset'])) params['cnn']['n_metafeatures3'] = len(X_meta3[0]) if 'meta-suffix4' in params['dataset']: X_meta4 = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' 
% (params['dataset']['meta-suffix4'],params['dataset']['dataset'])) params['cnn']['n_metafeatures3'] = len(X_meta4[0]) print(X_meta.shape)<|fim▁hole|> config = Config(params) model_dir = os.path.join(common.MODELS_DIR, config.model_id) common.ensure_dir(common.MODELS_DIR) common.ensure_dir(model_dir) model_file = os.path.join(model_dir, config.model_id + common.MODEL_EXT) logging.debug("Building Network...") #model = build_model(config) model = build_model(config) print(model.summary()) #plot(model, to_file='model2.png', show_shapes=True) trained_model = config.get_dict() # Save model #plot(model, to_file=os.path.join(model_dir, config.model_id + PLOT_EXT)) common.save_model(model, model_file) logging.debug(trained_model["model_id"]) logging.debug("Loading Data...") with_generator = True if only_metadata: X_train, Y_train, X_val, Y_val, X_test, Y_test = \ load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"], config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, metadata_source) if 'meta-suffix2' in params['dataset']: X_train2, Y_train2, X_val2, Y_val2, X_test2, Y_test2 = \ load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"], config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, params['dataset']['meta-suffix2']) X_train = [X_train,X_train2] X_val = [X_val,X_val2] X_test = [X_test,X_test2] print("X_train bi", len(X_train)) if 'meta-suffix3' in params['dataset']: X_train3, Y_train3, X_val3, Y_val3, X_test3, Y_test3 = \ load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"], config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, params['dataset']['meta-suffix3']) X_train.append(X_train3) X_val.append(X_val3) X_test.append(X_test3) print("X_train tri", len(X_train)) if 'meta-suffix4' in params['dataset']: X_train4, Y_train4, X_val4, Y_val4, X_test4, Y_test4 = \ load_data_preprocesed(params, config.x_path, config.y_path, params['dataset']['dataset'], config.training_params["validation"], config.training_params["test"], config.dataset_settings["nsamples"], with_metadata, only_metadata, params['dataset']['meta-suffix4']) X_train.append(X_train4) X_val.append(X_val4) X_test.append(X_test4) print("X_train four", len(X_train)) else: if with_generator: id2gt = dict() factors = np.load(common.DATASETS_DIR+'/y_train_'+config.y_path+'.npy') index_factors = open(common.DATASETS_DIR+'/items_index_train_'+params['dataset']['dataset']+'.tsv').read().splitlines() id2gt = dict((index,factor) for (index,factor) in zip(index_factors,factors)) X_val, Y_val, X_test, Y_test, N_train = load_data_hf5_memory(params,config.training_params["validation"],config.training_params["test"],config.y_path,id2gt,X_meta,config.training_params["val_from_file"]) if params['dataset']['nsamples'] != 'all': N_train = min(N_train,params['dataset']['nsamples']) else: X_train, Y_train, X_val, Y_val, X_test, Y_test, N_train = load_data_hf5(params,config.training_params["validation"],config.training_params["test"]) trained_model["whiten_scaler"] = common.TRAINDATA_DIR+'/scaler_%s.pk' % config.x_path logging.debug("Training...") if config.model_arch["final_activation"] == 'softmax': monitor_metric = 'val_categorical_accuracy' else: monitor_metric = 'val_loss' early_stopping = 
EarlyStopping(monitor=monitor_metric, patience=4) if only_metadata: epochs = model.fit(X_train, Y_train, batch_size=config.training_params["n_minibatch"], #shuffle='batch', nb_epoch=config.training_params["n_epochs"], verbose=1, validation_data=(X_val, Y_val), callbacks=[early_stopping]) else: if with_generator: print(N_train) epochs = model.fit_generator(batch_block_generator(params,config.y_path,N_train,id2gt,X_meta,config.training_params["val_from_file"]), samples_per_epoch = N_train-(N_train % config.training_params["n_minibatch"]), nb_epoch = config.training_params["n_epochs"], verbose=1, validation_data = (X_val, Y_val), callbacks=[early_stopping]) else: epochs = model.fit(X_train, Y_train, batch_size=config.training_params["n_minibatch"], shuffle='batch', nb_epoch=config.training_params["n_epochs"], verbose=1, validation_data=(X_val, Y_val), callbacks=[early_stopping]) model.save_weights(os.path.join(model_dir, config.model_id + common.WEIGHTS_EXT)) logging.debug("Saving trained model %s in %s..." % (trained_model["model_id"], common.DEFAULT_TRAINED_MODELS_FILE)) common.save_trained_model(common.DEFAULT_TRAINED_MODELS_FILE, trained_model) logging.debug("Evaluating...") print(X_test[0].shape,X_test[1].shape) preds=model.predict(X_test) print(preds.shape) if params["dataset"]["evaluation"] in ['binary','multiclass']: y_pred = (preds > 0.5).astype('int32') acc = accuracy_score(Y_test,y_pred) prec = precision_score(Y_test,y_pred,average='macro') recall = recall_score(Y_test,y_pred,average='macro') f1 = f1_score(Y_test,y_pred,average='macro') print('Accuracy', acc) print("%.3f\t%.3f\t%.3f" % (prec,recall,f1)) if params["dataset"]["fact"] == 'class': good_classes = np.nonzero(Y_test.sum(0))[0] print(Y_test.shape,preds.shape) #roc_auc=roc_auc_score(Y_test[:,good_classes],preds[:,good_classes]) #logging.debug('ROC-AUC '+str(roc_auc)) #pr_auc = average_precision_score(Y_test[:,good_classes],preds[:,good_classes]) #print('PR-AUC',pr_auc) #r2 = roc_auc elif params["dataset"]["evaluation"] not in ['binary','multiclass','multilabel']: r2s = [] for i,pred in enumerate(preds): r2 = r2_score(Y_test[i],pred) r2s.append(r2) r2 = np.asarray(r2s).mean() logging.debug('R2 avg '+str(r2)) # Batch prediction if X_test[1].shape == Y_test[1].shape: score = model.evaluate(X_test, Y_test, verbose=0) logging.debug(score) logging.debug(model.metrics_names) print(score) trained_model["loss_score"] = score[0] trained_model["mse"] = score[1] if params["dataset"]["evaluation"] not in ['binary','multiclass','multilabel']: trained_model["r2"] = r2 fw=open(common.DATA_DIR+'/results/train_results.txt','a') fw.write(trained_model["model_id"]+'\n') if params["training"]["loss_func"] == 'binary_crossentropy': fw.write('ROC-AUC: '+str(roc_auc)+'\n') print('ROC-AUC: '+str(roc_auc)) fw.write('Loss: '+str(score[0])+' ('+config.training_params["loss_func"]+')\n') fw.write('MSE: '+str(score[1])+'\n') elif params["dataset"]["evaluation"] not in ['binary','multiclass','multilabel']: fw.write('R2 avg: '+str(r2)+'\n') print('R2 avg: '+str(r2)) fw.write('Loss: '+str(score[0])+' ('+config.training_params["loss_func"]+')\n') fw.write('MSE: '+str(score[1])+'\n') fw.write(json.dumps(epochs.history)+"\n\n") fw.close() if with_predict: trained_models = pd.read_csv(common.DEFAULT_TRAINED_MODELS_FILE, sep='\t') model_config = trained_models[trained_models["model_id"] == trained_model["model_id"]] model_config = model_config.to_dict(orient="list") testset = open(common.DATASETS_DIR+'/items_index_test_%s.tsv' % 
(config.dataset_settings["dataset"])).read().splitlines() if config.training_params["val_from_file"] and not only_metadata: predictions, predictions_index = obtain_predictions(model_config, testset, trained_model["model_id"], config.predicting_params["trim_coeff"], model=model, with_metadata=with_metadata, only_metadata=only_metadata, metadata_source=metadata_source, with_patches=True) else: predictions, predictions_index = obtain_predictions(model_config, testset, trained_model["model_id"], config.predicting_params["trim_coeff"], model=model, with_metadata=with_metadata, only_metadata=only_metadata, metadata_source=metadata_source) print("Predictions created") if with_eval: do_eval(trained_model["model_id"],get_roc=True,get_map=True,get_p=True,predictions=predictions,predictions_index=predictions_index) if __name__ == '__main__': parser = argparse.ArgumentParser( description='Evaluates the model', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-p', '--params', dest="params_file", help='JSON file with params', default=False) parser.add_argument('-pred', '--predict', dest="with_predict", help='Predict factors', action='store_true', default=False) parser.add_argument('-eval', '--eval', dest="with_eval", help='Eval factors', action='store_true', default=False) parser.add_argument('-m', '--metadata', dest="with_metadata", help='Use metadata', action='store_true', default=False) parser.add_argument('-om', '--only_metadata', dest="only_metadata", help='Use only metadata', action='store_true', default=False) parser.add_argument('-ms', '--metadata_source', dest="metadata_source", type=str, help='Suffix of metadata files', default="rovi") args = parser.parse_args() params = models.params_1 if args.params_file: params = json.load(open(args.params_file)) process(params)<|fim▁end|>
else: X_meta = None
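This completion closes the long `if with_metadata:` branch near the top of `process()` in the `train.py` record; spliced in, the end of that block reads roughly as follows (the elided middle loads the metadata matrices; indentation assumed):

```python
if with_metadata:
    # ... load X_meta (and optional X_meta2..X_meta4) from TRAINDATA_DIR ...
    print(X_meta.shape)
else:
    X_meta = None
```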
<|file_name|>NetworkConfigurator.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2006-2007 Sun Microsystems, Inc. All rights reserved. * * The Sun Project JXTA(TM) Software License * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. The end-user documentation included with the redistribution, if any, must * include the following acknowledgment: "This product includes software * developed by Sun Microsystems, Inc. for JXTA(TM) technology." * Alternately, this acknowledgment may appear in the software itself, if * and wherever such third-party acknowledgments normally appear. * * 4. The names "Sun", "Sun Microsystems, Inc.", "JXTA" and "Project JXTA" must * not be used to endorse or promote products derived from this software * without prior written permission. For written permission, please contact * Project JXTA at http://www.jxta.org. * * 5. Products derived from this software may not be called "JXTA", nor may * "JXTA" appear in their name, without prior written permission of Sun. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SUN * MICROSYSTEMS OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * JXTA is a registered trademark of Sun Microsystems, Inc. in the United * States and other countries. * * Please see the license information page at : * <http://www.jxta.org/project/www/license.html> for instructions on use of * the license in source files. * * ==================================================================== * * This software consists of voluntary contributions made by many individuals * on behalf of Project JXTA. For more information on Project JXTA, please see * http://www.jxta.org. * * This license is based on the BSD license adopted by the Apache Foundation. 
*/ package net.jxta.platform; import net.jxta.document.Advertisement; import net.jxta.document.AdvertisementFactory; import net.jxta.document.MimeMediaType; import net.jxta.document.StructuredDocumentFactory; import net.jxta.document.StructuredDocumentUtils; import net.jxta.document.XMLDocument; import net.jxta.document.XMLElement; import net.jxta.endpoint.EndpointAddress; import net.jxta.id.ID; import net.jxta.id.IDFactory; import net.jxta.impl.membership.pse.PSEUtils; import net.jxta.impl.membership.pse.PSEUtils.IssuerInfo; import net.jxta.impl.peergroup.StdPeerGroup; import net.jxta.impl.protocol.HTTPAdv; import net.jxta.impl.protocol.PSEConfigAdv; import net.jxta.impl.protocol.PeerGroupConfigAdv; import net.jxta.impl.protocol.PlatformConfig; import net.jxta.impl.protocol.RdvConfigAdv; import net.jxta.impl.protocol.RdvConfigAdv.RendezVousConfiguration; import net.jxta.impl.protocol.RelayConfigAdv; import net.jxta.impl.protocol.TCPAdv; import net.jxta.logging.Logging; import net.jxta.peer.PeerID; import net.jxta.peergroup.PeerGroup; import net.jxta.peergroup.PeerGroupID; import net.jxta.protocol.ConfigParams; import net.jxta.protocol.TransportAdvertisement; import javax.security.cert.CertificateException; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.security.PrivateKey; import java.security.cert.X509Certificate; import java.util.Enumeration; import java.util.List; import java.util.MissingResourceException; import java.util.NoSuchElementException; import java.util.PropertyResourceBundle; import java.util.ResourceBundle; import java.util.Set; import java.util.logging.Logger; import net.jxta.impl.protocol.MulticastAdv; /** * NetworkConfigurator provides a simple programmatic interface for JXTA configuration. * <p/> * By default, it defines an edge configuration with TCP in auto mode w/port * range 9701-9799, multicast enabled on group "224.0.1.85", and port 1234, * HTTP transport with only outgoing enabled. * <p/> * By default a new PeerID is always generated. This can be overridden via * {@link NetworkConfigurator#setPeerID} method or loading a PlatformConfig via * {@link NetworkConfigurator#load}. * <p/> * A facility is provided to initialize a configuration by loading from an * existing configuration. This provides limited platform configuration lifecycle * management as well as configuration change management. * <p/> * Also by default, this class sets the default platform configurator to * {@link net.jxta.impl.peergroup.NullConfigurator}. <code>NullConfigurator<code> * is a no operation configurator intended to prevent any other configurators from * being invoked. * <p/> * NetworkConfigurator makes use of classes from the {@code net.jxta.impl.*} * packages. Applications are very strongly encouraged to avoid importing these * classes as their interfaces may change without notice in future JXTA releases. * The NetworkConfigurator API abstracts the configuration implementation details * and will provide continuity and stability i.e. the NetworkConfigurator API * won't change and it will automatically accommodate changes to service * configuration. 
* <p/> * <em> Configuration example :</em> * <pre> * NetworkConfigurator config = new NetworkConfigurator(); * if (!config.exists()) { * // Create a new configuration with a new name, principal, and pass * config.setName("New Name"); * config.setPrincipal("username"); * config.setPassword("password"); * try { * //persist it * config.save(); * } catch (IOException io) { * // deal with the io error * } * } else { * // Load the pre-existing configuration * File pc = new File(config.getHome(), "PlatformConfig"); * try { * config.load(pc.toURI()); * // make changes if so desired * .. * .. * // store the PlatformConfig under the default home * config.save(); * } catch (CertificateException ce) { * // In case the root cert is invalid, this creates a new one * try { * //principal * config.setPrincipal("principal"); * //password to encrypt private key with * config.setPassword("password"); * config.save(); * } catch (Exception e) { * e.printStackTrace(); * } * } * <p/> * </pre> * * @since JXTA JSE 2.4 */ public class NetworkConfigurator { /** * Logger */ private final static transient Logger LOG = Logger.getLogger(NetworkConfigurator.class.getName()); // begin configuration modes /** * Relay off Mode */ public final static int RELAY_OFF = 1 << 2; /** * Relay client Mode */ public final static int RELAY_CLIENT = 1 << 3; /** * Relay Server Mode */ public final static int RELAY_SERVER = 1 << 4; /** * Proxy Server Mode */ public final static int PROXY_SERVER = 1 << 5; /** * TCP transport client Mode */ public final static int TCP_CLIENT = 1 << 6; /** * TCP transport Server Mode */ public final static int TCP_SERVER = 1 << 7; /** * HTTP transport client Mode */ public final static int HTTP_CLIENT = 1 << 8; /** * HTTP transport server Mode */ public final static int HTTP_SERVER = 1 << 9; /** * IP multicast transport Mode */ public final static int IP_MULTICAST = 1 << 10; /** * RendezVousService Mode */ public final static int RDV_SERVER = 1 << 11; /** * RendezVousService Client */ public final static int RDV_CLIENT = 1 << 12; /** * RendezVousService Ad-Hoc mode */ public final static int RDV_AD_HOC = 1 << 13; /** * HTTP2 (netty http tunnel) client */ public final static int HTTP2_CLIENT = 1 << 14; /** * HTTP2 (netty http tunnel) server */ public final static int HTTP2_SERVER = 1 << 15; /** * Default AD-HOC configuration */ public final static int ADHOC_NODE = TCP_CLIENT | TCP_SERVER | IP_MULTICAST | RDV_AD_HOC | RELAY_OFF; /** * Default Edge configuration */ public final static int EDGE_NODE = TCP_CLIENT | TCP_SERVER | HTTP_CLIENT | HTTP2_CLIENT | IP_MULTICAST | RDV_CLIENT | RELAY_CLIENT; /** * Default Rendezvous configuration */ public final static int RDV_NODE = RDV_SERVER | TCP_CLIENT | TCP_SERVER | HTTP_SERVER | HTTP2_SERVER; /** * Default Relay configuration */ public final static int RELAY_NODE = RELAY_SERVER | TCP_CLIENT | TCP_SERVER | HTTP_SERVER | HTTP2_SERVER; // /** // * Default Proxy configuration // * // * @since 2.6 Will be removed in a future release // */ // @Deprecated // public final static int PROXY_NODE = PROXY_SERVER | RELAY_NODE; // /** // * Default Rendezvous/Relay/Proxy configuration // * // * @since 2.6 Will be removed in a future release // */ // @Deprecated // public final static int RDV_RELAY_PROXY_NODE = RDV_NODE | PROXY_NODE; // end configuration modes /** * Default mode */ protected transient int mode = EDGE_NODE; /** * Default PlatformConfig Peer Description */ protected transient String description = "Platform Config Advertisement created by : " + 
NetworkConfigurator.class.getName(); /** * The location which will serve as the parent for all stored items used * by JXTA. */ private transient URI storeHome = null; /** * Default peer name */ protected transient String name = "unknown"; /** * AuthenticationType used by PSEMembership to specify the type of authentication. */ protected transient String authenticationType = null; /** * Password value used to generate root Certificate and to protect the * Certificate's PrivateKey. */ protected transient String password = null; /** * Default PeerID */ protected transient PeerID peerid = null; /** * Principal value used to generate root certificate */ protected transient String principal = null; /** * Public Certificate chain */ protected transient X509Certificate[] cert = null; /** * Subject private key */ protected transient PrivateKey subjectPkey = null; /** * Freestanding keystore location */ protected transient URI keyStoreLocation = null; /** * Proxy Service Document */ @Deprecated protected transient XMLElement proxyConfig; /** * Personal Security Environment Config Advertisement * * @see net.jxta.impl.membership.pse.PSEConfig */ protected transient PSEConfigAdv pseConf; /** * Rendezvous Config Advertisement */ protected transient RdvConfigAdv rdvConfig; /** * Default Rendezvous Seeding URI */ protected URI rdvSeedingURI = null; /** * Relay Config Advertisement */ protected transient RelayConfigAdv relayConfig; /** * Default Relay Seeding URI */ protected transient URI relaySeedingURI = null; /** * TCP Config Advertisement */ protected transient TCPAdv tcpConfig; /** * Multicating Config Advertisement */ protected transient MulticastAdv multicastConfig; /** * Default TCP transport state */ protected transient boolean tcpEnabled = true; /** * Default Multicast transport state */ protected transient boolean multicastEnabled = true; /** * HTTP Config Advertisement */ protected transient HTTPAdv httpConfig; /** * Default HTTP transport state */ protected transient boolean httpEnabled = true; /** * HTTP2 Config Advertisement */ protected transient TCPAdv http2Config; /** * Default HTTP2 transport state */ protected transient boolean http2Enabled = true; /** * Infrastructure Peer Group Configuration */ protected transient PeerGroupConfigAdv infraPeerGroupConfig; /** * Creates NetworkConfigurator instance with default AD-HOC configuration * * @param storeHome the URI to persistent store * @return NetworkConfigurator instance with default AD-HOC configuration */ public static NetworkConfigurator newAdHocConfiguration(URI storeHome) { return new NetworkConfigurator(ADHOC_NODE, storeHome); } /** * Creates NetworkConfigurator instance with default Edge configuration * * @param storeHome the URI to persistent store * @return NetworkConfigurator instance with default AD-HOC configuration */ public static NetworkConfigurator newEdgeConfiguration(URI storeHome) { return new NetworkConfigurator(EDGE_NODE, storeHome); } /** * Creates NetworkConfigurator instance with default Rendezvous configuration * * @param storeHome the URI to persistent store * @return NetworkConfigurator instance with default Rendezvous configuration */ public static NetworkConfigurator newRdvConfiguration(URI storeHome) { return new NetworkConfigurator(RDV_NODE, storeHome); } /** * Creates NetworkConfigurator instance with default Relay configuration * * @param storeHome the URI to persistent store * @return NetworkConfigurator instance with default Relay configuration */ public static NetworkConfigurator 
newRelayConfiguration(URI storeHome) { return new NetworkConfigurator(RELAY_NODE, storeHome); } /** * Creates NetworkConfigurator instance with default Rendezvous configuration * * @param storeHome the URI to persistent store * @return NetworkConfigurator instance with default Rendezvous configuration */ public static NetworkConfigurator newRdvRelayConfiguration(URI storeHome) { return new NetworkConfigurator(RDV_NODE | RELAY_SERVER, storeHome); } // /** // * Creates NetworkConfigurator instance with default Proxy configuration // * // * @param storeHome the URI to persistent store // * @return NetworkConfigurator instance with defaultProxy configuration // * // * @since 2.6 Will be removed in a future release // */ // @Deprecated // public static NetworkConfigurator newProxyConfiguration(URI storeHome) { // return new NetworkConfigurator(PROXY_NODE, storeHome); // } // /** // * Creates NetworkConfigurator instance with default Rendezvous, Relay, Proxy configuration // * // * @param storeHome the URI to persistent store // * @return NetworkConfigurator instance with default Rendezvous, Relay, Proxy configuration // * // * @since 2.6 It will be removed in a future release // */ // @Deprecated // public static NetworkConfigurator newRdvRelayProxyConfiguration(URI storeHome) { // return new NetworkConfigurator(RDV_RELAY_PROXY_NODE, storeHome); // } /** * Creates the default NetworkConfigurator. The configuration is stored with a default configuration mode of EDGE_NODE */ public NetworkConfigurator() { this(EDGE_NODE, new File(".jxta").toURI()); } /** * Creates a NetworkConfigurator with the default configuration of the * specified mode. <p/>Valid modes include ADHOC_NODE, EDGE_NODE, RDV_NODE * PROXY_NODE, RELAY_NODE, RDV_RELAY_PROXY_NODE, or any combination of * specific configuration.<p/> e.g. RDV_NODE | HTTP_CLIENT * * @param mode the new configuration mode * @param storeHome the URI to persistent store * @see #setMode */ public NetworkConfigurator(int mode, URI storeHome) { Logging.logCheckedFine(LOG, "Creating a default configuration"); setStoreHome(storeHome); httpConfig = createHttpAdv(); rdvConfig = createRdvConfigAdv(); relayConfig = createRelayConfigAdv(); // proxyConfig = createProxyAdv(); tcpConfig = createTcpAdv(); multicastConfig = createMulticastAdv(); http2Config = createHttp2Adv(); infraPeerGroupConfig = createInfraConfigAdv(); setMode(mode); } /** * Sets PlaformConfig Peer Description element * * @param description the peer description */ public void setDescription(String description) { this.description = description; } /** * Set the current directory for configuration and cache persistent store * <p/>(default is $CWD/.jxta) * <p/> * <dt>Simple example :</dt> * <pre> * <code> * //Create an application home * File appHome = new File(System.getProperty("JXTA_HOME", ".cache")); * //Create an instance home under the application home * File instanceHome = new File(appHome, instanceName); * jxtaConfig.setHome(instanceHome); * </code> * </pre> * * @param home the new home value * @see #getHome */ public void setHome(File home) { this.storeHome = home.toURI(); } /** * Returns the current directory for configuration and cache persistent * store. This is the same location as returned by {@link #getStoreHome()} * which is more general than this method. 
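 * <p/>For illustration only (mirroring the class-level example above), the persisted configuration document can be located relative to this directory : * <pre> *    File pc = new File(config.getHome(), "PlatformConfig"); * </pre>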
* * @return Returns the current home directory * @see #setHome */ public File getHome() { if ("file".equalsIgnoreCase(storeHome.getScheme())) { return new File(storeHome); } else { throw new UnsupportedOperationException("Home location is not a file:// URI : " + storeHome); } } /** * Returns the location which will serve as the parent for all stored items * used by JXTA. * * @return The location which will serve as the parent for all stored * items used by JXTA. * @see net.jxta.peergroup.PeerGroup#getStoreHome() */ public URI getStoreHome() { return storeHome; } /** * Sets the location which will serve as the parent for all stored items * used by JXTA. * * @param newHome new home directory URI * @see net.jxta.peergroup.PeerGroup#getStoreHome() */ public void setStoreHome(URI newHome) { // Fail if the URI is not absolute. if (!newHome.isAbsolute()) { throw new IllegalArgumentException("Only absolute URIs accepted for store home location."); } // Fail if the URI is Opaque. if (newHome.isOpaque()) { throw new IllegalArgumentException("Only hierarchical URIs accepted for store home location."); } // FIXME this should be removed when 1488 is committed if (!"file".equalsIgnoreCase(newHome.getScheme())) { throw new IllegalArgumentException("Only file based URI currently supported"); } // Adds a terminating / if (!newHome.toString().endsWith("/")) { newHome = URI.create(newHome.toString() + "/"); } storeHome = newHome; } /** * Toggles HTTP transport state * * @param enabled if true, enables HTTP transport */ public void setHttpEnabled(boolean enabled) { this.httpEnabled = enabled; if (!httpEnabled) { httpConfig.setClientEnabled(false); httpConfig.setServerEnabled(false); } } /** * Toggles the HTTP transport server (incoming) mode * * @param incoming toggles HTTP transport server mode */ public void setHttpIncoming(boolean incoming) { httpConfig.setServerEnabled(incoming); } /** * Toggles the HTTP transport client (outgoing) mode * * @param outgoing toggles HTTP transport client mode */ public void setHttpOutgoing(boolean outgoing) { httpConfig.setClientEnabled(outgoing); } /** * Sets the HTTP listening port (default 9901) * * @param port the new HTTP port value */ public void setHttpPort(int port) { httpConfig.setPort(port); } /** * Sets the HTTP interface Address to bind the HTTP transport to * <p/>e.g. "192.168.1.1" * * @param address the new address value */ public void setHttpInterfaceAddress(String address) { httpConfig.setInterfaceAddress(address); } /** * Returns the HTTP interface Address * * @param address the HTTP interface address */ public String getHttpInterfaceAddress() { return httpConfig.getInterfaceAddress(); } /** * Sets the HTTP JXTA Public Address * e.g. 
"192.168.1.1:9700" * * @param address the HTTP transport public address * @param exclusive determines whether an address is advertised exclusively */ public void setHttpPublicAddress(String address, boolean exclusive) { httpConfig.setServer(address); httpConfig.setPublicAddressOnly(exclusive); } public boolean isHttp2Enabled() { return http2Enabled; } public void setHttp2Enabled(boolean enabled) { http2Enabled = enabled; if(http2Enabled) { http2Config.setClientEnabled(false); http2Config.setServerEnabled(false); } } public boolean getHttp2IncomingStatus() { return http2Config.getServerEnabled(); } public void setHttp2Incoming(boolean enabled) { http2Config.setServerEnabled(enabled); } public boolean getHttp2OutgoingStatus() { return http2Config.getClientEnabled(); } public void setHttp2Outgoing(boolean enabled) { http2Config.setClientEnabled(enabled); } public int getHttp2Port() { return http2Config.getPort(); } public void setHttp2Port(int port) { http2Config.setPort(port); } public int getHttp2StartPort() { return http2Config.getStartPort(); } public void setHttp2StartPort(int startPort) { http2Config.setStartPort(startPort); } public int getHttp2EndPort() { return http2Config.getEndPort(); } public void setHttp2EndPort(int endPort) { http2Config.setEndPort(endPort); } public String getHttp2InterfaceAddress() { return http2Config.getInterfaceAddress(); } public void setHttp2InterfaceAddress(String address) { http2Config.setInterfaceAddress(address); } public String getHttp2PublicAddress() { return http2Config.getServer(); } public boolean isHttp2PublicAddressExclusive() { return http2Config.getPublicAddressOnly(); } public void setHttp2PublicAddress(String address, boolean exclusive) { http2Config.setServer(address); http2Config.setPublicAddressOnly(exclusive); } /** * Returns the HTTP JXTA Public Address * * @return exclusive determines whether an address is advertised exclusively */ public String getHttpPublicAddress() { return httpConfig.getServer(); } /** * Returns the HTTP JXTA Public Address exclusivity * * @return exclusive determines whether an address is advertised exclusively */ public boolean isHttpPublicAddressExclusive() { return httpConfig.getPublicAddressOnly(); } /** * Sets the ID which will be used for new net peer group instances. * <p/> * <p/>By Setting an alternate infrastructure PeerGroup ID (aka NetPeerGroup), * it prevents heterogeneous infrastructure PeerGroups from intersecting. * <p/>This is highly recommended practice for application deployment * * @param id the new infrastructure PeerGroupID as a string * @see net.jxta.peergroup.PeerGroupFactory#setNetPGID */ public void setInfrastructureID(ID id) { if (id == null || id.equals(ID.nullID)) { throw new IllegalArgumentException("PeerGroupID can not be null"); } infraPeerGroupConfig.setPeerGroupID(id); } /** * Sets the ID which will be used for new net peer group instances. * <p/> * <p/>By Setting an alternate infrastructure PeerGroup ID (aka NetPeerGroup), * it prevents heterogeneous infrastructure PeerGroups from intersecting. 
* <p/>This is highly recommended practice for application deployment * * @param idStr the new infrastructure PeerGroupID as a string * @see net.jxta.peergroup.PeerGroupFactory#setNetPGID */ public void setInfrastructureID(String idStr) { if (idStr == null || idStr.length() == 0) { throw new IllegalArgumentException("PeerGroupID string can not be empty or null"); } PeerGroupID pgid = (PeerGroupID) ID.create(URI.create(idStr)); setInfrastructureID(pgid); } /** * Gets the ID which will be used for new net peer group instances. * <p/> * * @return the infrastructure PeerGroupID as a string */ public String getInfrastructureIDStr() { return infraPeerGroupConfig.getPeerGroupID().toString(); } /** * Sets the infrastructure PeerGroup name meta-data * * @param name the Infrastructure PeerGroup name * @see net.jxta.peergroup.PeerGroupFactory#setNetPGName */ public void setInfrastructureName(String name) { infraPeerGroupConfig.setName(name); } /** * Gets the infrastructure PeerGroup name meta-data * * @return the Infrastructure PeerGroup name */ public String getInfrastructureName() { return infraPeerGroupConfig.getName(); } /** * Sets the infrastructure PeerGroup description meta-data * * @param description the infrastructure PeerGroup description * @see net.jxta.peergroup.PeerGroupFactory#setNetPGDesc */ public void setInfrastructureDescriptionStr(String description) { infraPeerGroupConfig.setDescription(description); } /** * Returns the infrastructure PeerGroup description meta-data * * @return the infrastructure PeerGroup description meta-data */ public String getInfrastructureDescriptionStr() { return infraPeerGroupConfig.getDescription(); } /** * Sets the infrastructure PeerGroup description meta-data * * @param description the infrastructure PeerGroup description * @see net.jxta.peergroup.PeerGroupFactory#setNetPGDesc */ public void setInfrastructureDesc(XMLElement description) { infraPeerGroupConfig.setDesc(description); } /** * Sets the current node configuration mode. * <p/>The default mode is EDGE, unless modified at construction time. * A node configuration mode defined a preset configuration * parameters based on a operating mode. i.e. an EDGE mode, enable * client/server side tcp, multicast, client side http, RelayService * client mode. * <p/> Valid modes include EDGE, RDV_SERVER, * RELAY_OFF, RELAY_CLIENT, RELAY_SERVER, PROXY_SERVER, or any combination * of which.<p/> e.g. 
RDV_SERVER + RELAY_SERVER * * @param mode the new configuration mode * @see #getMode */ public void setMode(int mode) { this.mode = mode; if ((mode & PROXY_SERVER) == PROXY_SERVER && ((mode & RELAY_SERVER) != RELAY_SERVER)) { mode = mode | RELAY_SERVER; } // RELAY config relayConfig.setClientEnabled((mode & RELAY_CLIENT) == RELAY_CLIENT); relayConfig.setServerEnabled((mode & RELAY_SERVER) == RELAY_SERVER); // RDV_SERVER if ((mode & RDV_SERVER) == RDV_SERVER) { rdvConfig.setConfiguration(RendezVousConfiguration.RENDEZVOUS); } else if ((mode & RDV_CLIENT) == RDV_CLIENT) { rdvConfig.setConfiguration(RendezVousConfiguration.EDGE); } else if ((mode & RDV_AD_HOC) == RDV_AD_HOC) { rdvConfig.setConfiguration(RendezVousConfiguration.AD_HOC); } // TCP tcpConfig.setClientEnabled((mode & TCP_CLIENT) == TCP_CLIENT); tcpConfig.setServerEnabled((mode & TCP_SERVER) == TCP_SERVER); // HTTP httpConfig.setClientEnabled((mode & HTTP_CLIENT) == HTTP_CLIENT); httpConfig.setServerEnabled((mode & HTTP_SERVER) == HTTP_SERVER); // HTTP2 http2Config.setClientEnabled((mode & HTTP2_CLIENT) == HTTP2_CLIENT); http2Config.setServerEnabled((mode & HTTP2_SERVER) == HTTP2_SERVER); // Multicast multicastConfig.setMulticastState((mode & IP_MULTICAST) == IP_MULTICAST); // EDGE if (mode == EDGE_NODE) { rdvConfig.setConfiguration(RendezVousConfiguration.EDGE); } } /** * Returns the current configuration mode * <p/>The default mode is EDGE, unless modified at construction time or through * Method {@link NetworkConfigurator#setMode}. A node configuration mode defined a preset configuration * parameters based on a operating mode. i.e. an EDGE mode, enable * client/server side tcp, multicast, client side http, RelayService * client mode. * * @return mode the current mode value * @see #setMode */ public int getMode() { return mode; } /** * Sets the IP group multicast packet size * * @param size the new multicast packet */ public void setMulticastSize(int size) { multicastConfig.setMulticastSize(size); } /** * Gets the IP group multicast packet size * * @return the multicast packet */ public int getMulticastSize() { return multicastConfig.getMulticastSize(); } /** * Sets the IP group multicast address (default 224.0.1.85) * * @param mcastAddress the new multicast group address * @see #setMulticastPort */ public void setMulticastAddress(String mcastAddress) { multicastConfig.setMulticastAddr(mcastAddress); } /** * Gets the multicast network interface * * @return the multicast network interface, null if none specified */ public String getMulticastInterface() { return multicastConfig.getMulticastInterface(); } /** * Sets the multicast network interface * * @param interfaceAddress multicast network interface */ public void setMulticastInterface(String interfaceAddress) { multicastConfig.setMulticastInterface(interfaceAddress); } /** * Sets the IP group multicast port (default 1234) * * @param port the new IP group multicast port * @see #setMulticastAddress */ public void setMulticastPort(int port) { multicastConfig.setMulticastPort(port); } /** * Sets the group multicast thread pool size (default 10) * * @param size the new multicast thread pool size */ public void setMulticastPoolSize(int size) { multicastConfig.setMulticastPoolSize(size); } /** * Sets the node name * * @param name node name */ public void setName(String name) { this.name = name; } /** * Gets the node name * * @return node name */ public String getName() { return this.name; } /** * Sets the Principal for the peer root certificate * * @param principal the new principal 
value * @see #setPassword * @see #getPrincipal * @see #setPrincipal */ public void setPrincipal(String principal) { this.principal = principal; } /** * Gets the Principal for the peer root certificate * * @return principal if a principal is set, null otherwise * @see #setPassword * @see #getPrincipal * @see #setPrincipal */ public String getPrincipal() { return principal; } /** * Sets the public Certificate for this configuration. * * @param cert the new cert value */ public void setCertificate(X509Certificate cert) { this.cert = new X509Certificate[]{cert}; } /** * Returns the public Certificate for this configuration. * * @return X509Certificate */ public X509Certificate getCertificate() { return (cert == null || cert.length == 0 ? null : cert[0]); } /** * Sets the public Certificate chain for this configuration. * * @param certificateChain the new Certificate chain value */ public void setCertificateChain(X509Certificate[] certificateChain) { this.cert = certificateChain; } /** * Gets the public Certificate chain for this configuration. * * @return X509Certificate chain */ public X509Certificate[] getCertificateChain() { return cert; } /** * Sets the Subject private key * * @param subjectPkey the subject private key */ public void setPrivateKey(PrivateKey subjectPkey) { this.subjectPkey = subjectPkey; } /** * Gets the Subject private key * * @return the subject private key */ public PrivateKey getPrivateKey() { return this.subjectPkey; } /** * Sets freestanding keystore location * * @param keyStoreLocation the absolute location of the freestanding keystore */ public void setKeyStoreLocation(URI keyStoreLocation) { this.keyStoreLocation = keyStoreLocation; } /** * Gets the freestanding keystore location * * @return the location of the freestanding keystore */ public URI getKeyStoreLocation() { return keyStoreLocation; } /** * Gets the authenticationType * * @return authenticationType the authenticationType value */ public String getAuthenticationType() { return this.authenticationType; } /** * Sets the authenticationType * * @param authenticationType the new authenticationType value */ public void setAuthenticationType(String authenticationType) { this.authenticationType = authenticationType; } /** * Sets the password used to sign the private key of the root certificate * * @param password the new password value * @see #setPassword * @see #getPrincipal * @see #setPrincipal */ public void setPassword(String password) { this.password = password; } /** * Gets the password used to sign the private key of the root certificate * * @return password if a password is set, null otherwise * @see #setPassword * @see #getPrincipal * @see #setPrincipal */ public String getPassword() { return password; } /** * Sets the PeerID (by default, a new PeerID is generated). * <p/>Note: Persist the PeerID generated, or use load() * to avoid overridding a node's PeerID between restarts. * * @param peerid the new <code>net.jxta.peer.PeerID</code> */ public void setPeerID(PeerID peerid) { this.peerid = peerid; } /** * Gets the PeerID * * @return peerid the <code>net.jxta.peer.PeerID</code> value */ public PeerID getPeerID() { return this.peerid; } /** * Sets Rendezvous Seeding URI * * @param seedURI Rendezvous service seeding URI */ public void addRdvSeedingURI(URI seedURI) { rdvConfig.addSeedingURI(seedURI); } // /** // * Sets Rendezvous Access Control URI // * <p/>e.g. 
http://rdv.jxtahosts.net/cgi-bin/rendezvousACL.cgi?3 // * // * @param aclURI Rendezvous Access Control URI // * // * @deprecated ACL seed lists are in functional conflict with 'UseOnlyRendezvousSeedsStatus'. // * They will be deprecated and removed in a future release. // */ // @Deprecated // public void setRdvACLURI(URI aclURI) { // rdvConfig.setAclUri(aclURI); // } // /** // * Gets Rendezvous Access Control URI if set // * <p/>e.g. http://rdv.jxtahosts.net/cgi-bin/rendezvousACL.cgi?3 // * // * @return aclURI Rendezvous Access Control URI // * // * @deprecated ACL seed lists are in functional conflict with 'UseOnlyRendezvousSeedsStatus'. // * They will be deprecated and removed in a future release. // */ // @Deprecated // public URI getRdvACLURI() { // return rdvConfig.getAclUri(); // } // /** // * Sets Relay Access Control URI // * <p/>e.g. http://rdv.jxtahosts.net/cgi-bin/relayACL.cgi?3 // * // * @param aclURI Relay Access Control URI // * // * @deprecated ACL seed lists are in functional conflict with 'UseOnlyRelaySeedsStatus'. // * They will be deprecated and removed in a future release. // */ // @Deprecated // public void setRelayACLURI(URI aclURI) { // relayConfig.setAclUri(aclURI); // } // /** // * Gets Relay Access Control URI if set // * <p/>e.g. http://rdv.jxtahosts.net/cgi-bin/relayACL.cgi?3 // * // * @return aclURI Relay Access Control URI<|fim▁hole|>// * @deprecated ACL seed lists are in functional conflict with 'UseOnlyRelaySeedsStatus'. // * They will be deprecated and removed in a future release. // */ // @Deprecated // public URI getRelayACLURI() { // return relayConfig.getAclUri(); // } /** * Sets the RelayService maximum number of simultaneous relay clients * * @param relayMaxClients the new relayMaxClients value */ public void setRelayMaxClients(int relayMaxClients) { if ((relayMaxClients != -1) && (relayMaxClients <= 0)) { throw new IllegalArgumentException("Relay Max Clients : " + relayMaxClients + " must be > 0"); } relayConfig.setMaxClients(relayMaxClients); } /** * Sets the RelayService Seeding URI * <p/>A seeding URI (when read) is expected to provide a list of * physical endpoint addresse(s) to relay peers * * @param seedURI RelayService seeding URI */ public void addRelaySeedingURI(URI seedURI) { relayConfig.addSeedingURI(seedURI); } /** * Sets the RendezVousService maximum number of simultaneous rendezvous clients * * @param rdvMaxClients the new rendezvousMaxClients value */ public void setRendezvousMaxClients(int rdvMaxClients) { if ((rdvMaxClients != -1) && (rdvMaxClients <= 0)) { throw new IllegalArgumentException("Rendezvous Max Clients : " + rdvMaxClients + " must be > 0"); } rdvConfig.setMaxClients(rdvMaxClients); } /** * Toggles TCP transport state * * @param enabled if true, enables TCP transport */ public void setTcpEnabled(boolean enabled) { this.tcpEnabled = enabled; if (!tcpEnabled) { tcpConfig.setClientEnabled(false); tcpConfig.setServerEnabled(false); } } /** * Sets the TCP transport listening port (default 9701) * * @param port the new tcpPort value */ public void setTcpPort(int port) { tcpConfig.setPort(port); } /** * Sets the lowest port on which the TCP Transport will listen if configured * to do so. Valid values are <code>-1</code>, <code>0</code> and * <code>1-65535</code>. The <code>-1</code> value is used to signify that * the port range feature should be disabled. The <code>0</code> specifies * that the Socket API dynamic port allocation should be used. 
For values * <code>1-65535</code> the value must be equal to or less than the value * used for end port. * * @param start the lowest port on which to listen. */ public void setTcpStartPort(int start) { tcpConfig.setStartPort(start); } /** * Sets the highest port on which the TCP Transport will listen if * configured to do so. Valid values are <code>-1</code>, <code>0</code> and * <code>1-65535</code>. The <code>-1</code> value is used to signify that * the port range feature should be disabled. The <code>0</code> specifies * that the Socket API dynamic port allocation should be used. For values * <code>1-65535</code> the value must be equal to or greater than the value * used for start port. * * @param end the new TCP end port */ public void setTcpEndPort(int end) { tcpConfig.setEndPort(end); } /** * Toggles TCP transport server (incoming) mode (default is on) * * @param incoming the new TCP server mode */ public void setTcpIncoming(boolean incoming) { tcpConfig.setServerEnabled(incoming); } /** * Toggles TCP transport client (outgoing) mode (default is true) * * @param outgoing the new tcpOutgoing value */ public void setTcpOutgoing(boolean outgoing) { tcpConfig.setClientEnabled(outgoing); } /** * Sets the TCP transport interface address * <p/>e.g. "192.168.1.1" * * @param address the TCP transport interface address */ public void setTcpInterfaceAddress(String address) { tcpConfig.setInterfaceAddress(address); } /** * Sets the node public address * <p/>e.g. "192.168.1.1:9701" * <p/>This address is the physical address defined in a node's * AccessPointAdvertisement. This is often required for NAT'd/FW nodes * * @param address the TCP transport public address * @param exclusive public address advertised exclusively */ public void setTcpPublicAddress(String address, boolean exclusive) { tcpConfig.setServer(address); tcpConfig.setPublicAddressOnly(exclusive); } /** * Toggles whether to use IP group multicast (default is true) * * @param multicastOn the new useMulticast value */ public void setUseMulticast(boolean multicastOn) { multicastConfig.setMulticastState(multicastOn); } /** * Determines whether to restrict RelayService leases to those defined in * the seed list. In other words, only registered endpoint address seeds * and seeds fetched from seeding URIs will be used. * <p/>WARNING: Disabling 'use only relay seed' will cause this peer to * search and fetch RdvAdvertisements for use as relay candidates. Rdvs * are not necessarily relays. * * @param useOnlyRelaySeeds restrict RelayService lease to seed list */ public void setUseOnlyRelaySeeds(boolean useOnlyRelaySeeds) { relayConfig.setUseOnlySeeds(useOnlyRelaySeeds); } /** * Determines whether to restrict RendezvousService leases to those defined in * the seed list. In other words, only registered endpoint address seeds * and seeds fetched from seeding URIs will be used. * * @param useOnlyRendezvouSeeds restrict RendezvousService lease to seed list */ public void setUseOnlyRendezvousSeeds(boolean useOnlyRendezvouSeeds) { rdvConfig.setUseOnlySeeds(useOnlyRendezvouSeeds); } /** * Adds RelayService peer seed address * <p/>A RelayService seed is defined as a physical endpoint address * <p/>e.g. http://192.168.1.1:9700, or tcp://192.168.1.1:9701 * * @param seedURI the relay seed URI */ public void addSeedRelay(URI seedURI) { relayConfig.addSeedRelay(seedURI.toString()); } /** * Adds Rendezvous peer seed, physical endpoint address * <p/>A RendezVousService seed is defined as a physical endpoint address * <p/>e.g.
http://192.168.1.1:9700, or tcp://192.168.1.1:9701 * * @param seedURI the rendezvous seed URI */ public void addSeedRendezvous(URI seedURI) { rdvConfig.addSeedRendezvous(seedURI); } /** * Returns true if a PlatformConfig file exist under store home * * @return true if a PlatformConfig file exist under store home */ public boolean exists() { URI platformConfig = storeHome.resolve("PlatformConfig"); try { return null != read(platformConfig); } catch (IOException failed) { return false; } } /** * Sets the PeerID for this Configuration * * @param peerIdStr the new PeerID as a string */ public void setPeerId(String peerIdStr) { this.peerid = (PeerID) ID.create(URI.create(peerIdStr)); } /** * Sets the new RendezvousService seeding URI as a string. * <p/>A seeding URI (when read) is expected to provide a list of * physical endpoint address to rendezvous peers * * @param seedURIStr the new rendezvous seed URI as a string */ public void addRdvSeedingURI(String seedURIStr) { rdvConfig.addSeedingURI(URI.create(seedURIStr)); } /** * Sets the new RelayService seeding URI as a string. * <p/>A seeding URI (when read) is expected to provide a list of * physical endpoint address to relay peers * * @param seedURIStr the new RelayService seed URI as a string */ public void addRelaySeedingURI(String seedURIStr) { relayConfig.addSeedingURI(URI.create(seedURIStr)); } /** * Sets the List relaySeeds represented as Strings * <p/>A RelayService seed is defined as a physical endpoint address * <p/>e.g. http://192.168.1.1:9700, or tcp://192.168.1.1:9701 * * @param seeds the Set RelayService seed URIs as a string */ public void setRelaySeedURIs(List<String> seeds) { relayConfig.clearSeedRelays(); for (String seedStr : seeds) { relayConfig.addSeedRelay(new EndpointAddress(seedStr)); } } /** * Sets the relaySeeds represented as Strings * <p/>A seeding URI (when read) is expected to provide a list of * physical endpoint address to relay peers * * @param seedURIs the List relaySeeds represented as Strings */ public void setRelaySeedingURIs(Set<String> seedURIs) { relayConfig.clearSeedingURIs(); for (String seedStr : seedURIs) { relayConfig.addSeedingURI(URI.create(seedStr)); } } /** * Clears the List of RelayService seeds */ public void clearRelaySeeds() { relayConfig.clearSeedRelays(); } /** * Clears the List of RelayService seeding URIs */ public void clearRelaySeedingURIs() { relayConfig.clearSeedingURIs(); } /** * Sets the List of RendezVousService seeds represented as Strings * <p/>A RendezvousService seed is defined as a physical endpoint address * <p/>e.g. http://192.168.1.1:9700, or tcp://192.168.1.1:9701 * * @param seeds the Set of rendezvousSeeds represented as Strings */ public void setRendezvousSeeds(Set<String> seeds) { rdvConfig.clearSeedRendezvous(); for (String seedStr : seeds) { rdvConfig.addSeedRendezvous(URI.create(seedStr)); } } /** * Sets the List of RendezVousService seeding URIs represented as Strings. * A seeding URI (when read) is expected to provide a list of * physical endpoint address to rendezvous peers. * * @param seedingURIs the List rendezvousSeeds represented as Strings. 
*/ public void setRendezvousSeedingURIs(List<String> seedingURIs) { rdvConfig.clearSeedingURIs(); for (String seedStr : seedingURIs) { rdvConfig.addSeedingURI(URI.create(seedStr)); } } /** * Clears the list of RendezVousService seeds */ public void clearRendezvousSeeds() { rdvConfig.clearSeedRendezvous(); } /** * Clears the list of RendezVousService seeding URIs */ public void clearRendezvousSeedingURIs() { rdvConfig.clearSeedingURIs(); } /** * Load a configuration from the specified store home uri * <p/> * e.g. file:/export/dist/EdgeConfig.xml, e.g. http://configserver.net/configservice?Edge * * @return The loaded configuration. * @throws IOException if an i/o error occurs * @throws CertificateException if the MembershipService is invalid */ public ConfigParams load() throws IOException, CertificateException { return load(storeHome.resolve("PlatformConfig")); } /** * Loads a configuration from a specified uri * <p/> * e.g. file:/export/dist/EdgeConfig.xml, e.g. http://configserver.net/configservice?Edge * * @param uri the URI to PlatformConfig * @return The loaded configuration. * @throws IOException if an i/o error occurs * @throws CertificateException if the MemebershipService is invalid */ public ConfigParams load(URI uri) throws IOException, CertificateException { if (uri == null) throw new IllegalArgumentException("URI can not be null"); Logging.logCheckedFine(LOG, "Loading configuration : ", uri); PlatformConfig platformConfig = read(uri); name = platformConfig.getName(); peerid = platformConfig.getPeerID(); description = platformConfig.getDescription(); XMLElement<?> param; // TCP tcpEnabled = platformConfig.isSvcEnabled(PeerGroup.tcpProtoClassID); tcpConfig = loadTcpAdv(platformConfig, PeerGroup.tcpProtoClassID); multicastEnabled = platformConfig.isSvcEnabled(PeerGroup.multicastProtoClassID); multicastConfig = loadMulticastAdv(platformConfig, PeerGroup.multicastProtoClassID); // HTTP try { param = (XMLElement) platformConfig.getServiceParam(PeerGroup.httpProtoClassID); httpEnabled = platformConfig.isSvcEnabled(PeerGroup.httpProtoClassID); Enumeration httpChilds = param.getChildren(TransportAdvertisement.getAdvertisementType()); // get the TransportAdv from either TransportAdv if (httpChilds.hasMoreElements()) { param = (XMLElement) httpChilds.nextElement(); } else { throw new IllegalStateException("Missing HTTP Advertisment"); } // Read-in the adv as it is now. 
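// Note: the HTTP service parameter wraps the HTTPAdv inside a TransportAdvertisement element; the lookup above unwraps it so the factory call below parses the transport advertisement itself.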
httpConfig = (HTTPAdv) AdvertisementFactory.newAdvertisement(param); } catch (Exception failure) { IOException ioe = new IOException("error processing the HTTP config advertisement"); ioe.initCause(failure); throw ioe; } // HTTP2 http2Enabled = platformConfig.isSvcEnabled(PeerGroup.http2ProtoClassID); http2Config = loadTcpAdv(platformConfig, PeerGroup.http2ProtoClassID); // // ProxyService // try { // param = (XMLElement) platformConfig.getServiceParam(PeerGroup.proxyClassID); // if (param != null && !platformConfig.isSvcEnabled(PeerGroup.proxyClassID)) { // mode = mode | PROXY_SERVER; // } // } catch (Exception failure) { // IOException ioe = new IOException("error processing the pse config advertisement"); // ioe.initCause(failure); // throw ioe; // } // Rendezvous try { param = (XMLElement) platformConfig.getServiceParam(PeerGroup.rendezvousClassID); // backwards compatibility param.addAttribute("type", RdvConfigAdv.getAdvertisementType()); rdvConfig = (RdvConfigAdv) AdvertisementFactory.newAdvertisement(param); if (rdvConfig.getConfiguration() == RendezVousConfiguration.AD_HOC) { mode = mode | RDV_AD_HOC; } else if (rdvConfig.getConfiguration() == RendezVousConfiguration.EDGE) { mode = mode | RDV_CLIENT; } else if (rdvConfig.getConfiguration() == RendezVousConfiguration.RENDEZVOUS) { mode = mode | RDV_SERVER; } } catch (Exception failure) { IOException ioe = new IOException("error processing the rendezvous config advertisement"); ioe.initCause(failure); throw ioe; } // Relay try { param = (XMLElement) platformConfig.getServiceParam(PeerGroup.relayProtoClassID); if (param != null && !platformConfig.isSvcEnabled(PeerGroup.relayProtoClassID)) { mode = mode | RELAY_OFF; } // backwards compatibility param.addAttribute("type", RelayConfigAdv.getAdvertisementType()); relayConfig = (RelayConfigAdv) AdvertisementFactory.newAdvertisement(param); } catch (Exception failure) { IOException ioe = new IOException("error processing the relay config advertisement"); ioe.initCause(failure); throw ioe; } // PSE param = (XMLElement) platformConfig.getServiceParam(PeerGroup.membershipClassID); if (param != null) { Advertisement adv = null; try { adv = AdvertisementFactory.newAdvertisement(param); } catch (NoSuchElementException notAnAdv) { CertificateException cnfe = new CertificateException("No membership advertisement found"); cnfe.initCause(notAnAdv); throw cnfe; } catch (IllegalArgumentException invalidAdv) { CertificateException cnfe = new CertificateException("Invalid membership advertisement"); cnfe.initCause(invalidAdv); throw cnfe; } if (adv instanceof PSEConfigAdv) { pseConf = (PSEConfigAdv) adv; cert = pseConf.getCertificateChain(); } else { throw new CertificateException("Error processing the Membership config advertisement.
Unexpected membership advertisement " + adv.getAdvertisementType()); } } // Infra Group infraPeerGroupConfig = (PeerGroupConfigAdv) platformConfig.getSvcConfigAdvertisement(PeerGroup.peerGroupClassID); if (null == infraPeerGroupConfig) { infraPeerGroupConfig = createInfraConfigAdv(); try { URI configPropsURI = storeHome.resolve("config.properties"); InputStream configPropsIS = configPropsURI.toURL().openStream(); ResourceBundle rsrcs = new PropertyResourceBundle(configPropsIS); configPropsIS.close(); NetGroupTunables tunables = new NetGroupTunables(rsrcs, new NetGroupTunables()); infraPeerGroupConfig.setPeerGroupID(tunables.id); infraPeerGroupConfig.setName(tunables.name); infraPeerGroupConfig.setDesc(tunables.desc); } catch (IOException ignored) { //ignored } catch (MissingResourceException ignored) { //ignored } } return platformConfig; } private TCPAdv loadTcpAdv(PlatformConfig platformConfig, ModuleClassID moduleClassID) { XMLElement<?> param = (XMLElement<?>) platformConfig.getServiceParam(moduleClassID); Enumeration<?> tcpChilds = param.getChildren(TransportAdvertisement.getAdvertisementType()); // get the TransportAdv from either TransportAdv or tcpConfig if (tcpChilds.hasMoreElements()) { param = (XMLElement<?>) tcpChilds.nextElement(); } else { throw new IllegalStateException("Missing TCP Advertisement"); } return (TCPAdv) AdvertisementFactory.newAdvertisement(param); } private MulticastAdv loadMulticastAdv(PlatformConfig platformConfig, ModuleClassID moduleClassID) { XMLElement<?> param2 = (XMLElement<?>) platformConfig.getServiceParam(moduleClassID); Enumeration<?> tcpChilds2 = param2.getChildren(TransportAdvertisement.getAdvertisementType()); // get the TransportAdv from either TransportAdv or multicastConfig if (tcpChilds2.hasMoreElements()) { param2 = (XMLElement<?>) tcpChilds2.nextElement(); } else { throw new IllegalStateException("Missing Multicast Advertisment"); } return (MulticastAdv) AdvertisementFactory.newAdvertisement(param2); } /** * Persists a PlatformConfig advertisement under getStoreHome()+"/PlaformConfig" * <p/> * Home may be overridden by a call to setHome() * * @throws IOException If there is a failure saving the PlatformConfig. * @see #load */ public void save() throws IOException { httpEnabled = (httpConfig.isClientEnabled() || httpConfig.isServerEnabled()); tcpEnabled = (tcpConfig.isClientEnabled() || tcpConfig.isServerEnabled()); http2Enabled = (http2Config.isClientEnabled() || http2Config.isServerEnabled()); ConfigParams advertisement = getPlatformConfig(); OutputStream out = null; try { if ("file".equalsIgnoreCase(storeHome.getScheme())) { File saveDir = new File(storeHome); saveDir.mkdirs(); // Sadly we can't use URL.openConnection() to create the // OutputStream for file:// URLs. bogus. 
out = new FileOutputStream(new File(saveDir, "PlatformConfig")); } else { out = storeHome.resolve("PlatformConfig").toURL().openConnection().getOutputStream(); } XMLDocument aDoc = (XMLDocument) advertisement.getDocument(MimeMediaType.XMLUTF8); OutputStreamWriter os = new OutputStreamWriter(out, "UTF-8"); aDoc.sendToWriter(os); os.flush(); } finally { if (null != out) { out.close(); } } } /** * Returns a XMLDocument representation of an Advertisement * * @param enabled whether the param doc is enabled, adds a "isOff" * element if disabled * @param adv the Advertisement to retrieve the param doc from * @return the parmDoc value */ protected XMLDocument getParmDoc(boolean enabled, Advertisement adv) { XMLDocument parmDoc = (XMLDocument) StructuredDocumentFactory.newStructuredDocument(MimeMediaType.XMLUTF8, "Parm"); XMLDocument doc = (XMLDocument) adv.getDocument(MimeMediaType.XMLUTF8); StructuredDocumentUtils.copyElements(parmDoc, parmDoc, doc); if (!enabled) { parmDoc.appendChild(parmDoc.createElement("isOff")); } return parmDoc; } /** * Creates an HTTP transport advertisement * * @return an HTTP transport advertisement */ protected HTTPAdv createHttpAdv() { httpConfig = (HTTPAdv) AdvertisementFactory.newAdvertisement(HTTPAdv.getAdvertisementType()); httpConfig.setProtocol("http"); httpConfig.setPort(9700); httpConfig.setClientEnabled((mode & HTTP_CLIENT) == HTTP_CLIENT); httpConfig.setServerEnabled((mode & HTTP_SERVER) == HTTP_SERVER); return httpConfig; } /** * Creates Personal Security Environment Config Advertisement * <p/>The configuration advertisement can include an optional seed certificate * chain and encrypted private key. If this seed information is present the PSE * Membership Service will require an initial authentication to unlock the * encrypted private key before creating the PSE keystore. The newly created * PSE keystore will be "seeded" with the certificate chain and the private key. * * @param principal principal * @param password the password used to sign the private key of the root certificate * @return PSEConfigAdv an PSE config advertisement * @see net.jxta.impl.protocol.PSEConfigAdv */ protected PSEConfigAdv createPSEAdv(String principal, String password) { pseConf = (PSEConfigAdv) AdvertisementFactory.newAdvertisement(PSEConfigAdv.getAdvertisementType()); if (principal != null && password != null) { IssuerInfo info = PSEUtils.genCert(principal, null); pseConf.setCertificate(info.cert); pseConf.setPrivateKey(info.subjectPkey, password.toCharArray()); } return pseConf; } /** * Creates Personal Security Environment Config Advertisement * <p/>The configuration advertisement can include an optional seed certificate * chain and encrypted private key. If this seed information is present the PSE * Membership Service will require an initial authentication to unlock the * encrypted private key before creating the PSE keystore. The newly created * PSE keystore will be "seeded" with the certificate chain and the private key. 
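 * <p/>Illustrative use only (variable names are examples; since the method is protected this would be called from a subclass, after the matching private key and password have been supplied via setPrivateKey and setPassword) : * <pre> *    setPrivateKey(importedKey); *    setPassword("keystore.password"); *    PSEConfigAdv psePart = createPSEAdv(importedCertificate); * </pre>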
* * @param cert X509Certificate * @return PSEConfigAdv an PSE config advertisement * @see net.jxta.impl.protocol.PSEConfigAdv */ protected PSEConfigAdv createPSEAdv(X509Certificate cert) { pseConf = (PSEConfigAdv) AdvertisementFactory.newAdvertisement(PSEConfigAdv.getAdvertisementType()); if (subjectPkey != null && password != null) { pseConf.setCertificate(cert); pseConf.setPrivateKey(subjectPkey, password.toCharArray()); } return pseConf; } /** * Creates Personal Security Environment Config Advertisement * <p/>The configuration advertisement can include an optional seed certificate * chain and encrypted private key. If this seed information is present the PSE * Membership Service will require an initial authentication to unlock the * encrypted private key before creating the PSE keystore. The newly created * PSE keystore will be "seeded" with the certificate chain and the private key. * * @param certificateChain X509Certificate[] * @return PSEConfigAdv an PSE config advertisement * @see net.jxta.impl.protocol.PSEConfigAdv */ protected PSEConfigAdv createPSEAdv(X509Certificate[] certificateChain) { pseConf = (PSEConfigAdv) AdvertisementFactory.newAdvertisement(PSEConfigAdv.getAdvertisementType()); if (subjectPkey != null && password != null) { pseConf.setCertificateChain(certificateChain); pseConf.setPrivateKey(subjectPkey, password.toCharArray()); } return pseConf; } /** * Creates a ProxyService configuration advertisement * * @return ProxyService configuration advertisement */ @Deprecated protected XMLDocument createProxyAdv() { return (XMLDocument) StructuredDocumentFactory.newStructuredDocument(MimeMediaType.XMLUTF8, "Parm"); } /** * Creates a RendezVousService configuration advertisement with default values (EDGE) * * @return a RdvConfigAdv */ protected RdvConfigAdv createRdvConfigAdv() { rdvConfig = (RdvConfigAdv) AdvertisementFactory.newAdvertisement(RdvConfigAdv.getAdvertisementType()); if (mode == RDV_AD_HOC) { rdvConfig.setConfiguration(RendezVousConfiguration.AD_HOC); } else if ((mode & RDV_CLIENT) == RDV_CLIENT) { rdvConfig.setConfiguration(RendezVousConfiguration.EDGE); } else if ((mode & RDV_SERVER) == RDV_SERVER) { rdvConfig.setConfiguration(RendezVousConfiguration.RENDEZVOUS); } // A better alternative is to reference rdv service defaults (currently private) // rdvConfig.setMaxClients(200); return rdvConfig; } /** * Creates a RelayService configuration advertisement with default values (EDGE) * * @return a RelayConfigAdv */ protected RelayConfigAdv createRelayConfigAdv() { relayConfig = (RelayConfigAdv) AdvertisementFactory.newAdvertisement(RelayConfigAdv.getAdvertisementType()); // Since 2.6 - We should only use seeds when it comes to relay (see Javadoc) // relayConfig.setUseOnlySeeds(false); relayConfig.setClientEnabled((mode & RELAY_CLIENT) == RELAY_CLIENT || mode == EDGE_NODE); relayConfig.setServerEnabled((mode & RELAY_SERVER) == RELAY_SERVER); return relayConfig; } /** * Creates an TCP transport advertisement with the platform default values. 
* TCP client and server enabled on port 9701. * * @return a TCP transport advertisement */ protected TCPAdv createTcpAdv() { tcpConfig = (TCPAdv) AdvertisementFactory.newAdvertisement(TCPAdv.getAdvertisementType()); tcpConfig.setProtocol("tcp"); tcpConfig.setInterfaceAddress(null); tcpConfig.setPort(9701); //tcpConfig.setStartPort(9701); //tcpConfig.setEndPort(9799); tcpConfig.setServer(null); tcpConfig.setClientEnabled((mode & TCP_CLIENT) == TCP_CLIENT); tcpConfig.setServerEnabled((mode & TCP_SERVER) == TCP_SERVER); return tcpConfig; } /** * Creates a multicast transport advertisement with the platform default values. * Multicast on, 224.0.1.85:1234, with a max packet size of 16K. * * @return a multicast transport advertisement */ protected MulticastAdv createMulticastAdv() { multicastConfig = (MulticastAdv) AdvertisementFactory.newAdvertisement(MulticastAdv.getAdvertisementType()); multicastConfig.setProtocol("tcp"); multicastConfig.setMulticastAddr("224.0.1.85"); multicastConfig.setMulticastPort(1234); multicastConfig.setMulticastSize(16384); multicastConfig.setMulticastState((mode & IP_MULTICAST) == IP_MULTICAST); return multicastConfig; } protected TCPAdv createHttp2Adv() { http2Config = (TCPAdv) AdvertisementFactory.newAdvertisement(TCPAdv.getAdvertisementType()); http2Config.setProtocol("http2"); http2Config.setInterfaceAddress(null); http2Config.setPort(8080); http2Config.setStartPort(8080); http2Config.setEndPort(8089); http2Config.setServer(null); http2Config.setClientEnabled((mode & HTTP2_CLIENT) == HTTP2_CLIENT); http2Config.setServerEnabled((mode & HTTP2_SERVER) == HTTP2_SERVER); return http2Config; } protected PeerGroupConfigAdv createInfraConfigAdv() { infraPeerGroupConfig = (PeerGroupConfigAdv) AdvertisementFactory.newAdvertisement( PeerGroupConfigAdv.getAdvertisementType()); NetGroupTunables tunables = new NetGroupTunables(ResourceBundle.getBundle("net.jxta.impl.config"), new NetGroupTunables()); infraPeerGroupConfig.setPeerGroupID(tunables.id); infraPeerGroupConfig.setName(tunables.name); infraPeerGroupConfig.setDesc(tunables.desc); return infraPeerGroupConfig; } /** * Returns a PlatformConfig which represents a platform configuration. * <p/>Fine tuning is achieved through accessing each configured advertisement * and modifying each object directly.
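 * <p/>For example (the port value and multicast flag below are illustrative only; every method shown is defined on this class) : * <pre> *    NetworkConfigurator config = NetworkConfigurator.newEdgeConfiguration(new File(".jxta").toURI()); *    config.setTcpPort(9703); *    config.setUseMulticast(false); *    ConfigParams platformConfig = config.getPlatformConfig(); * </pre>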
* * @return the PeerPlatformConfig Advertisement */ public ConfigParams getPlatformConfig() { PlatformConfig advertisement = (PlatformConfig) AdvertisementFactory.newAdvertisement( PlatformConfig.getAdvertisementType()); advertisement.setName(name); advertisement.setDescription(description); if (tcpConfig != null) { boolean enabled = tcpEnabled && (tcpConfig.isServerEnabled() || tcpConfig.isClientEnabled()); advertisement.putServiceParam(PeerGroup.tcpProtoClassID, getParmDoc(enabled, tcpConfig)); } if (multicastConfig != null) { boolean enabled = multicastConfig.getMulticastState(); advertisement.putServiceParam(PeerGroup.multicastProtoClassID, getParmDoc(enabled, multicastConfig)); } if (httpConfig != null) { boolean enabled = httpEnabled && (httpConfig.isServerEnabled() || httpConfig.isClientEnabled()); advertisement.putServiceParam(PeerGroup.httpProtoClassID, getParmDoc(enabled, httpConfig)); } if (http2Config != null) { boolean enabled = http2Enabled && (http2Config.isServerEnabled() || http2Config.isClientEnabled()); advertisement.putServiceParam(PeerGroup.http2ProtoClassID, getParmDoc(enabled, http2Config)); } if (relayConfig != null) { boolean isOff = ((mode & RELAY_OFF) == RELAY_OFF) || (!relayConfig.isServerEnabled() && !relayConfig.isClientEnabled()); XMLDocument relayDoc = (XMLDocument) relayConfig.getDocument(MimeMediaType.XMLUTF8); if (isOff) { relayDoc.appendChild(relayDoc.createElement("isOff")); } advertisement.putServiceParam(PeerGroup.relayProtoClassID, relayDoc); } if (rdvConfig != null) { XMLDocument rdvDoc = (XMLDocument) rdvConfig.getDocument(MimeMediaType.XMLUTF8); advertisement.putServiceParam(PeerGroup.rendezvousClassID, rdvDoc); } if (principal == null) { principal = System.getProperty("impl.membership.pse.authentication.principal", "JxtaCN"); } if (password == null) { password = System.getProperty("impl.membership.pse.authentication.password", "the!one!password"); } if (cert != null) { pseConf = createPSEAdv(cert); } else { pseConf = createPSEAdv(principal, password); cert = pseConf.getCertificateChain(); } if (pseConf != null) { if (keyStoreLocation != null) { if (keyStoreLocation.isAbsolute()) { pseConf.setKeyStoreLocation(keyStoreLocation); } else { Logging.logCheckedWarning(LOG, "Keystore location set, but is not absolute: ", keyStoreLocation); } } XMLDocument pseDoc = (XMLDocument) pseConf.getDocument(MimeMediaType.XMLUTF8); advertisement.putServiceParam(PeerGroup.membershipClassID, pseDoc); } if (authenticationType == null) { authenticationType = System.getProperty("impl.membership.pse.authentication.type", "StringAuthentication"); } StdPeerGroup.setPSEMembershipServiceKeystoreInfoFactory(new StdPeerGroup.DefaultPSEMembershipServiceKeystoreInfoFactory(authenticationType, password)); if (peerid == null) { peerid = IDFactory.newPeerID(PeerGroupID.worldPeerGroupID, cert[0].getPublicKey().getEncoded()); } advertisement.setPeerID(peerid); // if (proxyConfig != null && ((mode & PROXY_SERVER) == PROXY_SERVER)) { // advertisement.putServiceParam(PeerGroup.proxyClassID, proxyConfig); // } if ((null != infraPeerGroupConfig) && (null != infraPeerGroupConfig.getPeerGroupID()) && (ID.nullID != infraPeerGroupConfig.getPeerGroupID()) && (PeerGroupID.defaultNetPeerGroupID != infraPeerGroupConfig.getPeerGroupID())) { advertisement.setSvcConfigAdvertisement(PeerGroup.peerGroupClassID, infraPeerGroupConfig); } return advertisement; } /** * @param location The location of the platform config.
* @return The platformConfig * @throws IOException Thrown for failures reading the PlatformConfig. */ private PlatformConfig read(URI location) throws IOException { URL url; try { url = location.toURL(); } catch (MalformedURLException mue) { IllegalArgumentException failure = new IllegalArgumentException("Failed to convert URI to URL"); failure.initCause(mue); throw failure; } InputStream input = url.openStream(); try { XMLDocument document = (XMLDocument) StructuredDocumentFactory.newStructuredDocument(MimeMediaType.XMLUTF8, input); PlatformConfig platformConfig = (PlatformConfig) AdvertisementFactory.newAdvertisement(document); return platformConfig; } finally { input.close(); } } /** * Indicates whether Http is enabled * * @return true if Http is enabled, else returns false * @see #setHttpEnabled */ public boolean isHttpEnabled() { return this.httpEnabled; } /** * Retrieves the Http incoming status * * @return true if Http incomming status is enabled, else returns false * @see #setHttpIncoming */ public boolean getHttpIncomingStatus() { return httpConfig.getServerEnabled(); } /** * Retrieves the Http outgoing status * * @return true if Http outgoing status is enabled, else returns false * @see #setHttpOutgoing */ public boolean getHttpOutgoingStatus() { return httpConfig.getClientEnabled(); } /** * Retrieves the Http port * * @return the current Http port * @see #setHttpPort */ public int getHttpPort() { return httpConfig.getPort(); } /** * Retrieves the current infrastructure ID * * @return the current infrastructure ID * @see #setInfrastructureID */ public ID getInfrastructureID() { return infraPeerGroupConfig.getPeerGroupID(); } /** * Retrieves the current multicast address * * @return the current multicast address * @see #setMulticastAddress */ public String getMulticastAddress() { return multicastConfig.getMulticastAddr(); } /** * Retrieves the current multicast port * * @return the current mutlicast port * @see #setMulticastPort */ public int getMulticastPort() { return multicastConfig.getMulticastPort(); } /** * Gets the group multicast thread pool size * * @return multicast thread pool size */ public int getMulticastPoolSize() { return multicastConfig.getMulticastPoolSize(); } /** * Indicates whether tcp is enabled * * @return true if tcp is enabled, else returns false * @see #setTcpEnabled */ public boolean isTcpEnabled() { return this.tcpEnabled; } /** * Retrieves the current tcp end port * * @return the current tcp port * @see #setTcpEndPort */ public int getTcpEndport() { return tcpConfig.getEndPort(); } /** * Retrieves the Tcp incoming status * * @return true if tcp incoming is enabled, else returns false * @see #setTcpIncoming */ public boolean getTcpIncomingStatus() { return tcpConfig.getServerEnabled(); } /** * Retrieves the Tcp outgoing status * * @return true if tcp outcoming is enabled, else returns false * @see #setTcpOutgoing */ public boolean getTcpOutgoingStatus() { return tcpConfig.getClientEnabled(); } /** * Retrieves the Tcp interface address * * @return the current tcp interface address * @see #setTcpInterfaceAddress */ public String getTcpInterfaceAddress() { return tcpConfig.getInterfaceAddress(); } /** * Retrieves the current Tcp port * * @return the current tcp port * @see #setTcpPort */ public int getTcpPort() { return tcpConfig.getPort(); } /** * Retrieves the current Tcp public address * * @return the current tcp public address * @see #setTcpPublicAddress */ public String getTcpPublicAddress() { return tcpConfig.getServer(); } /** * Indicates whether 
the current Tcp public address is exclusive * * @return true if the current tcp public address is exclusive, else returns false * @see #setTcpPublicAddress */ public boolean isTcpPublicAddressExclusive() { return tcpConfig.getPublicAddressOnly(); } /** * Retrieves the current Tcp start port * * @return the current tcp start port * @see #setTcpStartPort */ public int getTcpStartPort() { return tcpConfig.getStartPort(); } /** * Retrieves the multicast use status * * @return true if multicast is enabled, else returns false * @see #setUseMulticast */ public boolean getMulticastStatus() { return multicastConfig.getMulticastState(); } /** * Retrieves the use relay seeds only status * * @return true if only relay seeds are used, else returns false * @see #setUseOnlyRelaySeeds */ public boolean getUseOnlyRelaySeedsStatus() { return relayConfig.getUseOnlySeeds(); } /** * Retrieves the use rendezvous seeds only status * * @return true if only rendezvous seeds are used, else returns false * @see #setUseOnlyRendezvousSeeds */ public boolean getUseOnlyRendezvousSeedsStatus() { return rdvConfig.getUseOnlySeeds(); } /** * Retrieves the RendezVousService maximum number of simultaneous rendezvous clients * * @return the RendezVousService maximum number of simultaneous rendezvous clients * @see #setRendezvousMaxClients */ public int getRendezvousMaxClients() { return rdvConfig.getMaxClients(); } /** * Retrieves the RelayVousService maximum number of simultaneous relay clients * * @return the RelayService maximum number of simultaneous relay clients * @see #setRelayMaxClients */ public int getRelayMaxClients() { return relayConfig.getMaxClients(); } /** * Retrieves the rendezvous seedings * * @return the array of rendezvous seeding URL * @see #addRdvSeedingURI */ public URI[] getRdvSeedingURIs() { return rdvConfig.getSeedingURIs(); } /** * Retrieves the rendezvous seeds * * @return the array of rendezvous seeds URL * @see #addRdvSeedURI */ public URI[] getRdvSeedURIs() { return rdvConfig.getSeedRendezvous(); } /** * Retrieves the relay seeds * * @return the array of relay seeds URL * @see #addRelaySeedURI */ public URI[] getRelaySeedURIs() { return relayConfig.getSeedRelayURIs(); } /** * Retrieves the relay seeds * * @return the array of rendezvous seed URL * @see #addRelaySeedingURI */ public URI[] getRelaySeedingURIs() { return relayConfig.getSeedingURIs(); } /** * Holds the construction tunables for the Net Peer Group. This consists of * the peer group id, the peer group name and the peer group description. */ static class NetGroupTunables { final ID id; final String name; final XMLElement desc; /** * Constructor for loading the default Net Peer Group construction * tunables. */ NetGroupTunables() { id = PeerGroupID.defaultNetPeerGroupID; name = "NetPeerGroup"; desc = (XMLElement) StructuredDocumentFactory.newStructuredDocument(MimeMediaType.XMLUTF8, "desc", "default Net Peer Group"); } /** * Constructor for loading the default Net Peer Group construction * tunables. * * @param pgid the PeerGroupID * @param pgname the group name * @param pgdesc the group description */ NetGroupTunables(ID pgid, String pgname, XMLElement pgdesc) { id = pgid; name = pgname; desc = pgdesc; } /** * Constructor for loading the Net Peer Group construction * tunables from the provided resource bundle. * * @param rsrcs The resource bundle from which resources will be loaded. 
* @param defaults default values */ NetGroupTunables(ResourceBundle rsrcs, NetGroupTunables defaults) { ID idTmp; String nameTmp; XMLElement descTmp; try { String idTmpStr = rsrcs.getString("NetPeerGroupID").trim(); if (idTmpStr.startsWith(ID.URNNamespace + ":")) { idTmpStr = idTmpStr.substring(5); } idTmp = IDFactory.fromURI(new URI(ID.URIEncodingName + ":" + ID.URNNamespace + ":" + idTmpStr)); nameTmp = rsrcs.getString("NetPeerGroupName").trim(); descTmp = (XMLElement) StructuredDocumentFactory.newStructuredDocument(MimeMediaType.XMLUTF8, "desc", rsrcs.getString("NetPeerGroupDesc").trim()); } catch (Exception failed) { if (null != defaults) { Logging.logCheckedFine(LOG, "NetPeerGroup tunables not defined or could not be loaded. Using defaults.\n\n", failed); idTmp = defaults.id; nameTmp = defaults.name; descTmp = defaults.desc; } else { Logging.logCheckedSevere(LOG, "NetPeerGroup tunables not defined or could not be loaded.\n", failed); throw new IllegalStateException("NetPeerGroup tunables not defined or could not be loaded."); } } id = idTmp; name = nameTmp; desc = descTmp; } } }<|fim▁end|>
// *
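The NetGroupTunables constructor in the row above reads the net peer group id, name and description from a ResourceBundle and falls back to compiled-in defaults when the lookup fails. A minimal Python sketch of that load-with-fallback pattern follows; the resource keys mirror the names used above, while the default id string is only a placeholder, not the real JXTA constant.

DEFAULTS = {
    "id": "<default-net-peer-group-id>",   # placeholder, not the actual JXTA PeerGroupID constant
    "name": "NetPeerGroup",
    "desc": "default Net Peer Group",
}

def load_net_group_tunables(resources, defaults=DEFAULTS):
    # Prefer values from the resource bundle; on any missing key fall back to
    # the defaults, mirroring the try/catch in NetGroupTunables above.
    try:
        return {
            "id": resources["NetPeerGroupID"].strip(),
            "name": resources["NetPeerGroupName"].strip(),
            "desc": resources["NetPeerGroupDesc"].strip(),
        }
    except KeyError:
        if defaults is None:
            # No fallback available: surface this as a configuration error.
            raise
        return dict(defaults)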
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2014-2015 The Servo Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![cfg_attr(feature = "unstable", feature(core))] use std::cmp; use std::slice; static BIT_REV_U8: [u8; 256] = [ 0b0000_0000, 0b1000_0000, 0b0100_0000, 0b1100_0000, 0b0010_0000, 0b1010_0000, 0b0110_0000, 0b1110_0000, 0b0001_0000, 0b1001_0000, 0b0101_0000, 0b1101_0000, 0b0011_0000, 0b1011_0000, 0b0111_0000, 0b1111_0000, 0b0000_1000, 0b1000_1000, 0b0100_1000, 0b1100_1000, 0b0010_1000, 0b1010_1000, 0b0110_1000, 0b1110_1000, 0b0001_1000, 0b1001_1000, 0b0101_1000, 0b1101_1000, 0b0011_1000, 0b1011_1000, 0b0111_1000, 0b1111_1000, 0b0000_0100, 0b1000_0100, 0b0100_0100, 0b1100_0100, 0b0010_0100, 0b1010_0100, 0b0110_0100, 0b1110_0100, 0b0001_0100, 0b1001_0100, 0b0101_0100, 0b1101_0100, 0b0011_0100, 0b1011_0100, 0b0111_0100, 0b1111_0100, 0b0000_1100, 0b1000_1100, 0b0100_1100, 0b1100_1100, 0b0010_1100, 0b1010_1100, 0b0110_1100, 0b1110_1100, 0b0001_1100, 0b1001_1100, 0b0101_1100, 0b1101_1100, 0b0011_1100, 0b1011_1100, 0b0111_1100, 0b1111_1100, 0b0000_0010, 0b1000_0010, 0b0100_0010, 0b1100_0010, 0b0010_0010, 0b1010_0010, 0b0110_0010, 0b1110_0010, 0b0001_0010, 0b1001_0010, 0b0101_0010, 0b1101_0010, 0b0011_0010, 0b1011_0010, 0b0111_0010, 0b1111_0010, 0b0000_1010, 0b1000_1010, 0b0100_1010, 0b1100_1010, 0b0010_1010, 0b1010_1010, 0b0110_1010, 0b1110_1010, 0b0001_1010, 0b1001_1010, 0b0101_1010, 0b1101_1010, 0b0011_1010, 0b1011_1010, 0b0111_1010, 0b1111_1010, 0b0000_0110, 0b1000_0110, 0b0100_0110, 0b1100_0110, 0b0010_0110, 0b1010_0110, 0b0110_0110, 0b1110_0110, 0b0001_0110, 0b1001_0110, 0b0101_0110, 0b1101_0110, 0b0011_0110, 0b1011_0110, 0b0111_0110, 0b1111_0110, 0b0000_1110, 0b1000_1110, 0b0100_1110, 0b1100_1110, 0b0010_1110, 0b1010_1110, 0b0110_1110, 0b1110_1110, 0b0001_1110, 0b1001_1110, 0b0101_1110, 0b1101_1110, 0b0011_1110, 0b1011_1110, 0b0111_1110, 0b1111_1110, 0b0000_0001, 0b1000_0001, 0b0100_0001, 0b1100_0001, 0b0010_0001, 0b1010_0001, 0b0110_0001, 0b1110_0001, 0b0001_0001, 0b1001_0001, 0b0101_0001, 0b1101_0001, 0b0011_0001, 0b1011_0001, 0b0111_0001, 0b1111_0001, 0b0000_1001, 0b1000_1001, 0b0100_1001, 0b1100_1001, 0b0010_1001, 0b1010_1001, 0b0110_1001, 0b1110_1001, 0b0001_1001, 0b1001_1001, 0b0101_1001, 0b1101_1001, 0b0011_1001, 0b1011_1001, 0b0111_1001, 0b1111_1001, 0b0000_0101, 0b1000_0101, 0b0100_0101, 0b1100_0101, 0b0010_0101, 0b1010_0101, 0b0110_0101, 0b1110_0101, 0b0001_0101, 0b1001_0101, 0b0101_0101, 0b1101_0101, 0b0011_0101, 0b1011_0101, 0b0111_0101, 0b1111_0101, 0b0000_1101, 0b1000_1101, 0b0100_1101, 0b1100_1101, 0b0010_1101, 0b1010_1101, 0b0110_1101, 0b1110_1101, 0b0001_1101, 0b1001_1101, 0b0101_1101, 0b1101_1101, 0b0011_1101, 0b1011_1101, 0b0111_1101, 0b1111_1101, 0b0000_0011, 0b1000_0011, 0b0100_0011, 0b1100_0011, 0b0010_0011, 0b1010_0011, 0b0110_0011, 0b1110_0011, 0b0001_0011, 0b1001_0011, 0b0101_0011, 0b1101_0011, 0b0011_0011, 0b1011_0011, 0b0111_0011, 0b1111_0011, 0b0000_1011, 0b1000_1011, 0b0100_1011, 0b1100_1011, 0b0010_1011, 0b1010_1011, 0b0110_1011, 0b1110_1011, 0b0001_1011, 0b1001_1011, 0b0101_1011, 0b1101_1011, 0b0011_1011, 0b1011_1011, 0b0111_1011, 0b1111_1011, 0b0000_0111, 0b1000_0111, 0b0100_0111, 
0b1100_0111, 0b0010_0111, 0b1010_0111, 0b0110_0111, 0b1110_0111, 0b0001_0111, 0b1001_0111, 0b0101_0111, 0b1101_0111, 0b0011_0111, 0b1011_0111, 0b0111_0111, 0b1111_0111, 0b0000_1111, 0b1000_1111, 0b0100_1111, 0b1100_1111, 0b0010_1111, 0b1010_1111, 0b0110_1111, 0b1110_1111, 0b0001_1111, 0b1001_1111, 0b0101_1111, 0b1101_1111, 0b0011_1111, 0b1011_1111, 0b0111_1111, 0b1111_1111 ]; #[derive(Clone, Copy)] struct BitState { n: u8, v: u32 } #[derive(Clone)] struct BitStream<'a> { bytes: slice::Iter<'a, u8>, used: usize, state: BitState } // Use this instead of triggering a panic (that will unwind). #[cfg(feature = "unstable")] fn abort() -> ! { unsafe { ::std::intrinsics::abort() } } #[cfg(not(feature = "unstable"))] fn abort() -> ! { panic!() } #[cfg(debug)] macro_rules! debug { ($($x:tt)*) => (println!($($x)*)) } #[cfg(not(debug))] macro_rules! debug { ($($x:tt)*) => (()) } impl<'a> BitStream<'a> { fn new(bytes: &'a [u8], state: BitState) -> BitStream<'a> { BitStream { bytes: bytes.iter(), used: 0, state: state } } fn use_byte(&mut self) -> bool { match self.bytes.next() { Some(&b) => { self.state.v |= (b as u32) << self.state.n; self.state.n += 8; self.used += 1; true } None => false } } fn need(&mut self, n: u8) -> bool { if self.state.n < n { if !self.use_byte() { return false; } if n > 8 && self.state.n < n { if n > 16 { // HACK(eddyb) in place of a static assert. abort(); } if !self.use_byte() { return false; } } } true } fn take16(&mut self, n: u8) -> Option<u16> { if self.need(n) { self.state.n -= n; let v = self.state.v & ((1 << n) - 1); self.state.v >>= n; Some(v as u16) } else { None } } fn take(&mut self, n: u8) -> Option<u8> { if n > 8 { // HACK(eddyb) in place of a static assert. abort(); } self.take16(n).map(|v: u16| v as u8) } fn fill(&mut self) -> BitState { while self.state.n + 8 <= 32 && self.use_byte() {} self.state } } macro_rules! with_codes (($clens:expr, $max_bits:expr => $code_ty:ty, $cb:expr) => ({ // Count the number of codes for each bit length. let mut bl_count = [0 as $code_ty; ($max_bits+1)]; for &bits in $clens.iter() { if bits != 0 { bl_count[bits as usize] += 1; } } // Compute the first code value for each bit length. let mut next_code = [0 as $code_ty; ($max_bits+1)]; // TODO use range_inclusive as soon as it is stable //for bits in range_inclusive(1, $max_bits) { for bits in 1..$max_bits + 1 { next_code[bits as usize] = (next_code[bits as usize - 1] + bl_count[bits as usize - 1]) << 1; } for (i, &bits) in $clens.iter().enumerate() { if bits != 0 { let code = next_code[bits as usize]; next_code[bits as usize] += 1; match $cb(i as $code_ty, code, bits) { Ok(()) => (), Err(err) => return Err(err) } } } })); struct CodeLengthReader { patterns: Box<[u8; 128]>, clens: Box<[u8; 19]>, result: Vec<u8>, num_lit: u16, num_dist: u8 } impl CodeLengthReader { fn new(clens: Box<[u8; 19]>, num_lit: u16, num_dist: u8) -> Result<CodeLengthReader, String> { // Fill in the 7-bit patterns that match each code. let mut patterns = Box::new([0xffu8; 128]); with_codes!(clens, 7 => u8, |i: u8, code: u8, bits| -> _ { /*let base = match BIT_REV_U8.get((code << (8 - bits)) as usize) { Some(&base) => base, None => return Err("invalid length code".to_owned()) }*/ let base = BIT_REV_U8[(code << (8 - bits)) as usize]; for rest in 0u8 .. 
1u8 << (7 - bits) { patterns[(base | (rest << bits)) as usize] = i; } Ok(()) }); Ok(CodeLengthReader { patterns: patterns, clens: clens, result: Vec::with_capacity(num_lit as usize + num_dist as usize), num_lit: num_lit, num_dist: num_dist }) } fn read(&mut self, stream: &mut BitStream) -> Result<bool, String> { let total_len = self.num_lit as usize + self.num_dist as usize; while self.result.len() < total_len { if !stream.need(7) { return Ok(false); } let save = stream.clone(); macro_rules! take (($n:expr) => (match stream.take($n) { Some(v) => v, None => { *stream = save; return Ok(false); } })); let code = self.patterns[(stream.state.v & 0x7f) as usize]; stream.take(match self.clens.get(code as usize) { Some(&len) => len, None => return Err("invalid length code".to_owned()) }); match code { 0...15 => self.result.push(code), 16 => { let last = match self.result.last() { Some(&v) => v, // 16 appeared before anything else None => return Err("invalid length code".to_owned()) }; for _ in 0 .. 3 + take!(2) { self.result.push(last); } } 17 => for _ in 0 .. 3 + take!(3) { self.result.push(0); }, 18 => for _ in 0 .. 11 + take!(7) { self.result.push(0); }, _ => abort() } } Ok(true) } fn to_lit_and_dist(self) -> Result<(DynHuffman16, DynHuffman16), String> { let num_lit = self.num_lit as usize; let lit = try!(DynHuffman16::new(&self.result[..num_lit])); let dist = try!(DynHuffman16::new(&self.result[num_lit..])); Ok((lit, dist)) } } struct Trie8bit<T> { data: [T; 16], children: [Option<Box<[T; 16]>>; 16] } struct DynHuffman16 { patterns: Box<[u16; 256]>, rest: Vec<Trie8bit<u16>> } impl DynHuffman16 { fn new(clens: &[u8]) -> Result<DynHuffman16, String> { // Fill in the 8-bit patterns that match each code. // Longer patterns go into the trie. let mut patterns = Box::new([0xffffu16; 256]); let mut rest = Vec::new(); with_codes!(clens, 15 => u16, |i: u16, code: u16, bits: u8| -> _ { let entry = i | ((bits as u16) << 12); if bits <= 8 { let base = match BIT_REV_U8.get((code << (8 - bits)) as usize) { Some(&v) => v, None => return Err("invalid length code".to_owned()) }; for rest in 0u8 .. 1 << (8 - bits) { patterns[(base | (rest << (bits & 7))) as usize] = entry; } } else { let low = match BIT_REV_U8.get((code >> (bits - 8)) as usize) { Some(&v) => v, None => return Err("invalid length code".to_owned()) }; let high = BIT_REV_U8[((code << (16 - bits)) & 0xff) as usize]; let (min_bits, idx) = if patterns[low as usize] != 0xffff { let bits_prev = (patterns[low as usize] >> 12) as u8; (cmp::min(bits_prev, bits), patterns[low as usize] & 0x7ff) } else { rest.push(Trie8bit { data: [0xffff; 16], children: [ None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None ] }); (bits, (rest.len() - 1) as u16) }; patterns[low as usize] = idx | 0x800 | ((min_bits as u16) << 12); let trie_entry = match rest.get_mut(idx as usize) { Some(v) => v, None => return Err("invalid huffman code".to_owned()) }; if bits <= 12 { for rest in 0u8 .. 1 << (12 - bits) { trie_entry.data[(high | (rest << (bits - 8))) as usize] = entry; } } else { let child = &mut trie_entry.children[(high & 0xf) as usize]; if child.is_none() { *child = Some(Box::new([0xffff; 16])); } let child = &mut **child.as_mut().unwrap(); let high_top = high >> 4; for rest in 0u8 .. 
1 << (16 - bits) { child[(high_top | (rest << (bits - 12))) as usize] = entry; } } } Ok(()) }); debug!("=== DYN HUFFMAN ==="); for _i in 0..256 { debug!("{:08b} {:04x}", i, patterns[BIT_REV_U8[_i] as usize]); } debug!("==================="); Ok(DynHuffman16 { patterns: patterns, rest: rest }) } fn read<'a>(&self, stream: &mut BitStream<'a>) -> Result<Option<(BitStream<'a>, u16)>, String> { let has8 = stream.need(8); let entry = self.patterns[(stream.state.v & 0xff) as usize]; let bits = (entry >> 12) as u8; Ok(if !has8 { if bits <= stream.state.n { let save = stream.clone(); stream.state.n -= bits; stream.state.v >>= bits; Some((save, entry & 0xfff)) } else { None } } else if bits <= 8 { let save = stream.clone(); stream.state.n -= bits; stream.state.v >>= bits; Some((save, entry & 0xfff)) } else { let has16 = stream.need(16); let trie = match self.rest.get((entry & 0x7ff) as usize) { Some(trie) => trie, None => return Err("invalid entry in stream".to_owned()) }; let idx = stream.state.v >> 8; let trie_entry = match trie.children[(idx & 0xf) as usize] { Some(ref child) => child[((idx >> 4) & 0xf) as usize], None => trie.data[(idx & 0xf) as usize] }; let trie_bits = (trie_entry >> 12) as u8; if has16 || trie_bits <= stream.state.n { let save = stream.clone(); stream.state.n -= trie_bits; stream.state.v >>= trie_bits; Some((save, trie_entry & 0xfff)) } else { None } }) } } enum State { ZlibMethodAndFlags, // CMF ZlibFlags(/* CMF */ u8), // FLG, Bits(BitsNext, BitState), LenDist((BitsNext, BitState), /* len */ u16, /* dist */ u16), Uncompressed(/* len */ u16), CheckCRC } use self::State::*; enum BitsNext { BlockHeader, BlockUncompressed, BlockFixed, BlockDynHlit, BlockDynHdist(/* hlit */ u8), BlockDynHclen(/* hlit */ u8, /* hdist */ u8), BlockDynClenCodeLengths(/* hlit */ u8, /* hdist */ u8, /* hclen */ u8, /* idx */ u8, /* clens */ Box<[u8; 19]>), BlockDynCodeLengths(CodeLengthReader), BlockDyn(/* lit/len */ DynHuffman16, /* dist */ DynHuffman16, /* prev_len */ u16) } use self::BitsNext::*; pub struct InflateStream { buffer: Vec<u8>, pos: u16, state: Option<State>, final_block: bool, } impl InflateStream { #[allow(dead_code)] pub fn new() -> InflateStream { let state = Bits(BlockHeader, BitState { n: 0, v: 0 }); let buffer = Vec::with_capacity(32 * 1024); InflateStream::with_state_and_buffer(state, buffer) } pub fn from_zlib() -> InflateStream { InflateStream::with_state_and_buffer(ZlibMethodAndFlags, Vec::new()) } fn with_state_and_buffer(state: State, buffer: Vec<u8>) -> InflateStream { InflateStream { buffer: buffer, pos: 0, state: Some(state), final_block: false } } fn run_len_dist(&mut self, len: u16, dist: u16) -> Result<Option<u16>, String> { debug!("RLE -{}; {} (cap={} len={})", dist, len, self.buffer.capacity(), self.buffer.len()); let buffer_size = self.buffer.capacity() as u16; let len = if self.pos < dist { // Handle copying from ahead, until we hit the end reading. let pos_end = self.pos + len; let (pos_end, left) = if pos_end < dist { (pos_end, 0) } else { (dist, pos_end - dist) }; let forward = buffer_size - dist; // assert for unsafe code: if pos_end + forward > self.buffer.len() as u16 { return Err("invalid run length in stream".to_owned()) } unsafe { // HACK(eddyb) avoid bound checks, LLVM can't optimize these. 
let buffer = self.buffer.as_mut_ptr(); let dst_end = buffer.offset(pos_end as isize); let mut dst = buffer.offset(self.pos as isize); let mut src = dst.offset(forward as isize); while dst < dst_end { *dst = *src; dst = dst.offset(1); src = src.offset(1); } } /* for i in self.pos as usize..pos_end as usize { self.buffer[i] = self.buffer[i + forward as usize] } */ self.pos = pos_end; left } else { len }; // Handle copying from before, until we hit the end writing. let pos_end = self.pos + len; let (pos_end, left) = if pos_end <= buffer_size { (pos_end, None) } else { (buffer_size, Some(pos_end - buffer_size)) }; if self.buffer.len() < pos_end as usize { unsafe { self.buffer.set_len(pos_end as usize); } } // assert for unsafe code: if self.pos < dist && pos_end > self.pos { return Err("invalid run length in stream".to_owned()) } unsafe { // HACK(eddyb) avoid bound checks, LLVM can't optimize these. let buffer = self.buffer.as_mut_ptr(); let dst_end = buffer.offset(pos_end as isize); let mut dst = buffer.offset(self.pos as isize); let mut src = dst.offset(-(dist as isize)); while dst < dst_end { *dst = *src; dst = dst.offset(1); src = src.offset(1); } } /* for i in self.pos as usize..pos_end as usize { self.buffer[i] = self.buffer[i - dist as usize] } */ self.pos = pos_end; Ok(left) } fn next_state(&mut self, data: &[u8]) -> Result<usize, String> { macro_rules! ok_bytes (($n:expr, $state:expr) => ({ self.state = Some($state); Ok($n) })); let debug_byte = |_i, _b| debug!("[{:04x}] {:02x}", _i, _b); macro_rules! push_or (($b:expr, $ret:expr) => (if self.pos < self.buffer.capacity() as u16 { let b = $b; debug_byte(self.pos, b); if (self.pos as usize) < self.buffer.len() { self.buffer[self.pos as usize] = b; } else { if (self.pos as usize) != self.buffer.len() { abort(); } self.buffer.push(b); } self.pos += 1; } else { return $ret; })); macro_rules! run_len_dist (($len:expr, $dist:expr => ($bytes:expr, $next:expr, $state:expr)) => ({ let dist = $dist; let left = try!(self.run_len_dist($len, dist)); match left { Some(len) => { return ok_bytes!($bytes, LenDist(($next, $state), len, dist)); } None => {} } })); match self.state.take().unwrap() { ZlibMethodAndFlags => { let b = data[0]; let (method, info) = (b & 0xF, b >> 4); debug!("ZLIB CM=0x{:x} CINFO=0x{:x}", method, info); match method { 8 => {/* DEFLATE */} _ => return Err(format!("unknown ZLIB method CM=0x{:x}", method)) } if info > 7 { return Err(format!("invalid ZLIB info CINFO=0x{:x}", info)); } //self.buffer = Vec::with_capacity(1 << (8 + info)); self.buffer = Vec::with_capacity(1 << (8 + info)); ok_bytes!(1, ZlibFlags(b)) } ZlibFlags(cmf) => { let b = data[0]; let (_check, dict, _level) = (b & 0x1F, (b & 0x20) != 0, b >> 6); debug!("ZLIB FCHECK=0x{:x} FDICT={} FLEVEL=0x{:x}", _check, dict, _level); if (((cmf as u16) << 8) | b as u16) % 31 != 0 { return Err(format!("invalid ZLIB checksum CMF=0x{:x} FLG=0x{:x}", cmf, b)); } if dict { return Err("unimplemented ZLIB FDICT=1".into()); } ok_bytes!(1, Bits(BlockHeader, BitState { n: 0, v: 0 })) } Bits(next, state) => { let mut stream = BitStream::new(data, state); macro_rules! ok_state (($state:expr) => ({self.state = Some($state); Ok(stream.used)})); macro_rules! ok (($next:expr) => (ok_state!(Bits($next, stream.fill())))); macro_rules! need (($n:expr) => (if !stream.need($n) { return ok!(next); })); macro_rules! take ( ($n:expr => $next:expr) => (match stream.take($n) { Some(v) => v, None => return ok!($next) }); ($n:expr) => (take!($n => next)) ); macro_rules! 
take16 ( ($n:expr => $next:expr) => (match stream.take16($n) { Some(v) => v, None => return ok!($next) }); ($n:expr) => (take16!($n => next)) ); macro_rules! len_dist ( ($len:expr, $code:expr, $bits:expr => $next_early:expr, $next:expr) => ({ let dist = 1 + if $bits == 0 { 0 } else { // new_base 2 << $bits } + (($code as u16 - if $bits == 0 { 0 } else { // old_base $bits * 2 + 2 }) << $bits) + take16!($bits => $next_early) as u16; run_len_dist!($len, dist => (stream.used, $next, stream.state)); }); ($len:expr, $code:expr, $bits:expr) => ( len_dist!($len, $code, $bits => next, next) ) ); match next { BlockHeader => { if self.final_block { return ok_state!(CheckCRC) } let h = take!(3); let (final_, block_type) = ((h & 1) != 0, (h >> 1) & 0b11); self.final_block = final_; match block_type { 0 => { // Skip to the next byte for an uncompressed block. stream.state = BitState { n: 0, v: 0 }; ok!(BlockUncompressed) } 1 => { /*let lit = DynHuffman16::new(&[ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // 0-15 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // 16-31 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // 32-47 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // 48-63 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // 64-79 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // 80-95 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // 96-101 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // 112-127 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // 128-143 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 144-159 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 160-175 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 176-191 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 192-207 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 208-223 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 224-239 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 240-255 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, // 256-271 7, 7, 7, 7, 7, 7, 7, 7, // 272-279 8, 8, 8, 8, 8, 8, 8, 8, // 280-287 ]); let dist = DynHuffman16::new(&[ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 ]); ok!(BlockDyn(lit, dist, 0)) */ ok!(BlockFixed) } 2 => ok!(BlockDynHlit), _ => Err(format!("unimplemented DEFLATE block type 0b{:?}", block_type)) } } BlockUncompressed => { let len = take16!(16); let nlen = take16!(16); assert_eq!(stream.state.n, 0); if !len != nlen { return Err("invalid uncompressed block len".to_owned()) } ok_state!(Uncompressed(len)) } BlockFixed => { let mut save; macro_rules! len_dist2 (($len:expr, $code_const:expr, $code_rev:expr, $bits:expr) => ({ len_dist!($len, $code_const + ($code_rev >> 4), $bits => {stream = save; next}, next); })); macro_rules! 
len (($code:expr, $bits:expr) => ({ let len = 3 + if $bits == 0 { 0 } else { // new_base 4 << $bits } + ((if $code == 29 { 256 } else { $code as u16 } - if $bits == 0 { 0 } else { // old_base $bits * 4 + 4 } - 1) << $bits) + take!($bits => {stream = save; next}) as u16; let code = take!(5 => {stream = save; next}); debug!(" {:05b}", BIT_REV_U8[(code << 3) as usize]); match code { 0b00000 | 0b10000 => len_dist2!(len, 0, code, 0), 0b01000 | 0b11000 => len_dist2!(len, 2, code, 0), 0b00100 | 0b10100 => len_dist2!(len, 4, code, 1), 0b01100 | 0b11100 => len_dist2!(len, 6, code, 2), 0b00010 | 0b10010 => len_dist2!(len, 8, code, 3), 0b01010 | 0b11010 => len_dist2!(len, 10, code, 4), 0b00110 | 0b10110 => len_dist2!(len, 12, code, 5), 0b01110 | 0b11110 => len_dist2!(len, 14, code, 6), 0b00001 | 0b10001 => len_dist2!(len, 16, code, 7), 0b01001 | 0b11001 => len_dist2!(len, 18, code, 8), 0b00101 | 0b10101 => len_dist2!(len, 20, code, 9), 0b01101 | 0b11101 => len_dist2!(len, 22, code, 10), 0b00011 | 0b10011 => len_dist2!(len, 24, code, 11), 0b01011 | 0b11011 => len_dist2!(len, 26, code, 12), 0b00111 | 0b10111 => len_dist2!(len, 28, code, 13), _ => return Err(format!("bad DEFLATE dist code {}", code)) } })); loop { need!(7); // 0000000 through 0010111 if (stream.state.v & 0b11) == 0b00 && (stream.state.v & 0b1100) != 0b1100 { save = stream.clone(); // FIXME(eddyb) use a 7-bit rev LUT or match the huffman code directly. let code = BIT_REV_U8[(stream.take(7).unwrap() << 1) as usize]; debug!("{:09b}", code as u16 + 256); match code { 0 => return if self.final_block { ok_state!(CheckCRC) } else { ok!(BlockHeader) }, 1...8 => len!(code, 0), 9...12 => len!(code, 1), 13...16 => len!(code, 2), 17...20 => len!(code, 3), 21...23 => len!(code, 4), _ => return Err(format!("bad DEFLATE len code {}", code as u16 + 256)) }; continue; } need!(8); // 00110000 through 10111111 if (stream.state.v & 0b11) != 0b11 { save = stream.clone(); // FIXME(eddyb) use a specialized rev LUT with addend. let code = BIT_REV_U8[(stream.take(8).unwrap()) as usize] - 0b0011_0000; debug!("{:09b}", code); push_or!(code, ok!({stream = save; next})); continue; } // 11000000 through 11000111 if (stream.state.v & 0b11100) == 0b00000 { save = stream.clone(); // FIXME(eddyb) use a 3-bit rev LUT or match the huffman code directly. let code = 24 + (BIT_REV_U8[stream.take(8).unwrap() as usize] - 0b11000000); debug!("{:09b}", code as u16 + 256); match code { 24 => len!(24, 4), 25...28 => len!(code, 5), 29 => len!(29, 0), _ => return Err(format!("bad DEFLATE len code {}", code as u16 + 256)) }; continue; } need!(9); // 110010000 through 111111111 save = stream.clone(); // FIXME(eddyb) use a specialized rev LUT with addend. 
let code = BIT_REV_U8[(stream.take16(9).unwrap() >> 1) as usize]; debug!("{:09b}", code); push_or!(code, ok!({stream = save; next})); } }<|fim▁hole|> ok!(BlockDynClenCodeLengths(hlit, hdist, take!(4) + 4, 0, Box::new([0; 19]))) } BlockDynClenCodeLengths(hlit, hdist, hclen, i, mut clens) => { let v = match stream.take(3) { Some(v) => v, None => return ok!(BlockDynClenCodeLengths(hlit, hdist, hclen, i, clens)) }; clens[[16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15][i as usize]] = v; if i < hclen - 1 { ok!(BlockDynClenCodeLengths(hlit, hdist, hclen, i + 1, clens)) } else { ok!(BlockDynCodeLengths(try!(CodeLengthReader::new(clens, hlit as u16 + 256, hdist)))) } } BlockDynCodeLengths(mut reader) => { let finished = try!(reader.read(&mut stream)); if finished { let (lit, dist) = try!(reader.to_lit_and_dist()); ok!(BlockDyn(lit, dist, 0)) } else { ok!(BlockDynCodeLengths(reader)) } } BlockDyn(huff_lit_len, huff_dist, mut prev_len) => { macro_rules! next (($save_len:expr) => (BlockDyn(huff_lit_len, huff_dist, $save_len))); loop { let len = if prev_len != 0 { let len = prev_len; prev_len = 0; len } else { let (save, code16) = match try!(huff_lit_len.read(&mut stream)) { Some(data) => data, None => return ok!(next!(0)) }; let code = code16 as u8; debug!("{:09b}", code16); match code16 { 0...255 => { push_or!(code, ok!({stream = save; next!(0)})); continue; } 256...285 => {} _ => return Err(format!("bad DEFLATE len code {}", code)) } macro_rules! len (($code:expr, $bits:expr) => ( 3 + if $bits == 0 { 0 } else { // new_base 4 << $bits } + ((if $code == 29 { 256 } else { $code as u16 } - if $bits == 0 { 0 } else { // old_base $bits * 4 + 4 } - 1) << $bits) + take!($bits => {stream = save; next!(0)}) as u16 )); match code { 0 => return if self.final_block { ok_state!(CheckCRC) } else { ok!(BlockHeader) }, 1...8 => len!(code, 0), 9...12 => len!(code, 1), 13...16 => len!(code, 2), 17...20 => len!(code, 3), 21...24 => len!(code, 4), 25...28 => len!(code, 5), 29 => len!(29, 0), _ => return Err(format!("bad DEFLATE len code {}", code as u16 + 256)) } }; let (save, dist_code) = match try!(huff_dist.read(&mut stream)) { Some(data) => data, None => return ok!(next!(len)) }; debug!(" {:05b}", dist_code); macro_rules! 
len_dist_case (($bits:expr) => ( len_dist!(len, dist_code, $bits => {stream = save; next!(len)}, next!(0)) )); match dist_code { 0...3 => len_dist_case!(0), 4...5 => len_dist_case!(1), 6...7 => len_dist_case!(2), 8...9 => len_dist_case!(3), 10...11 => len_dist_case!(4), 12...13 => len_dist_case!(5), 14...15 => len_dist_case!(6), 16...17 => len_dist_case!(7), 18...19 => len_dist_case!(8), 20...21 => len_dist_case!(9), 22...23 => len_dist_case!(10), 24...25 => len_dist_case!(11), 26...27 => len_dist_case!(12), 28...29 => len_dist_case!(13), _ => return Err(format!("bad DEFLATE dist code {}", dist_code)) } } } } } LenDist((next, state), len, dist) => { run_len_dist!(len, dist => (0, next, state)); ok_bytes!(0, Bits(next, state)) } Uncompressed(mut len) => { for (i, &b) in data.iter().enumerate() { if len == 0 { return ok_bytes!(i, Bits(BlockHeader, BitState { n: 0, v: 0 })); } push_or!(b, ok_bytes!(i, Uncompressed(len))); len -= 1; } ok_bytes!(data.len(), Uncompressed(len)) } CheckCRC => { let _b = data[0]; debug!("CRC {:02x}", _b); ok_bytes!(1, CheckCRC) } } } pub fn update<'a>(&'a mut self, mut data: &[u8]) -> Result<(usize, &'a [u8]), String> { let original_size = data.len(); let original_pos = self.pos as usize; while data.len() > 0 && ((self.pos as usize) < self.buffer.capacity() || self.buffer.capacity() == 0) { match self.next_state(data) { Ok(n) => { data = &data[n..]; } Err(m) => return Err(m) } } let output = &self.buffer[original_pos .. self.pos as usize]; if self.pos as usize >= self.buffer.capacity() { self.pos = 0; } Ok((original_size - data.len(), output)) } }<|fim▁end|>
BlockDynHlit => ok!(BlockDynHdist(take!(5) + 1)), BlockDynHdist(hlit) => ok!(BlockDynHclen(hlit, take!(5) + 1)), BlockDynHclen(hlit, hdist) => {
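The with_codes! macro in the lib.rs prompt above assigns canonical Huffman codes the way RFC 1951 section 3.2.2 describes: count the codes of each bit length, derive the smallest code for each length, then hand out consecutive values. A small Python sketch of that assignment, added here only as an illustration of the same steps, not taken from the crate:

def assign_canonical_codes(code_lengths, max_bits):
    # Step 1: count the number of codes for each bit length.
    bl_count = [0] * (max_bits + 1)
    for bits in code_lengths:
        if bits:
            bl_count[bits] += 1
    # Step 2: numerical value of the smallest code for each code length.
    next_code = [0] * (max_bits + 1)
    for bits in range(1, max_bits + 1):
        next_code[bits] = (next_code[bits - 1] + bl_count[bits - 1]) << 1
    # Step 3: assign consecutive values to all codes of the same length.
    codes = {}
    for symbol, bits in enumerate(code_lengths):
        if bits:
            codes[symbol] = (next_code[bits], bits)  # (code value, bit length)
            next_code[bits] += 1
    return codes

# RFC 1951's worked example: lengths (3, 3, 3, 3, 3, 2, 4, 4) for symbols A..H
# produce the codes 010, 011, 100, 101, 110, 00, 1110, 1111.
print(assign_canonical_codes([3, 3, 3, 3, 3, 2, 4, 4], 4))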
<|file_name|>call_interpolate.py<|end_file_name|><|fim▁begin|>from bundleprocessing import interpolateMetric
import pandas as pd
import nibabel as nib
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-templateBundle', '--templateBundle', required = True)
parser.add_argument('-subjectBundle', '--subjectBundle', required = True)
parser.add_argument('-metric', '--metric', required = True)
parser.add_argument('-numPoints', '--numPoints', type=int, required = True)
<|fim▁hole|>
parser.add_argument('-outMetrics', '--outMetrics', required = True)
args = parser.parse_args()
tracks, hdr = nib.trackvis.read(args.templateBundle)
templateBundle = [trk[0] for trk in tracks]
tracks, hdr = nib.trackvis.read(args.subjectBundle)
subjectBundle = [trk[0] for trk in tracks]
image = nib.load(args.metric)
metric = image.get_data()
subjectTracks,scalars = interpolateMetric(templateBundle, subjectBundle, metric, hdr['voxel_size'], args.numPoints, args.flip)
nib.trackvis.write(args.outTracks,subjectTracks,hdr)
data = pd.DataFrame(scalars,columns=["Metrics"])
data.to_csv(args.outMetrics,index=False)<|fim▁end|>
parser.add_argument('-flip', '--flip', type=bool, required = True)
parser.add_argument('-outTracks', '--outTracks', required = True)
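One caveat about the completion above: argparse's type=bool does not parse strings the way one might expect, because bool() of any non-empty string (including "False") is True, so passing -flip False still yields True. A common workaround is an explicit converter; the helper below is illustrative and not part of the original script.

import argparse

def str2bool(value):
    value = value.lower()
    if value in ("1", "true", "yes"):
        return True
    if value in ("0", "false", "no"):
        return False
    raise argparse.ArgumentTypeError("expected a boolean, got %r" % value)

# e.g. parser.add_argument('-flip', '--flip', type=str2bool, required=True)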
<|file_name|>halfspace.cpp<|end_file_name|><|fim▁begin|>/* * Software License Agreement (BSD License) * * Copyright (c) 2011-2014, Willow Garage, Inc. * Copyright (c) 2014-2016, Open Source Robotics Foundation * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * Neither the name of Open Source Robotics Foundation nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /** @author Jia Pan */ #include "fcl/narrowphase/detail/primitive_shape_algorithm/halfspace-inl.h" namespace fcl { namespace detail { //============================================================================== template <> float halfspaceIntersectTolerance() { return 0.0001f;<|fim▁hole|>template <> double halfspaceIntersectTolerance() { return 0.0000001; } //============================================================================== template bool sphereHalfspaceIntersect( const Sphere<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2, std::vector<ContactPoint<double>>* contacts); //============================================================================== template bool ellipsoidHalfspaceIntersect( const Ellipsoid<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2, std::vector<ContactPoint<double>>* contacts); //============================================================================== template bool boxHalfspaceIntersect( const Box<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2); //============================================================================== template bool boxHalfspaceIntersect( const Box<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2, std::vector<ContactPoint<double>>* contacts); //============================================================================== template bool capsuleHalfspaceIntersect( const Capsule<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2, std::vector<ContactPoint<double>>* contacts); //============================================================================== template bool 
cylinderHalfspaceIntersect( const Cylinder<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2, std::vector<ContactPoint<double>>* contacts); //============================================================================== template bool coneHalfspaceIntersect( const Cone<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2, std::vector<ContactPoint<double>>* contacts); //============================================================================== template bool convexHalfspaceIntersect( const Convex<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2, Vector3<double>* contact_points, double* penetration_depth, Vector3<double>* normal); //============================================================================== template bool halfspaceTriangleIntersect( const Halfspace<double>& s1, const Transform3<double>& tf1, const Vector3<double>& P1, const Vector3<double>& P2, const Vector3<double>& P3, const Transform3<double>& tf2, Vector3<double>* contact_points, double* penetration_depth, Vector3<double>* normal); //============================================================================== template bool planeHalfspaceIntersect( const Plane<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2, Plane<double>& pl, Vector3<double>& p, Vector3<double>& d, double& penetration_depth, int& ret); //============================================================================== template bool halfspacePlaneIntersect( const Halfspace<double>& s1, const Transform3<double>& tf1, const Plane<double>& s2, const Transform3<double>& tf2, Plane<double>& pl, Vector3<double>& p, Vector3<double>& d, double& penetration_depth, int& ret); //============================================================================== template bool halfspaceIntersect( const Halfspace<double>& s1, const Transform3<double>& tf1, const Halfspace<double>& s2, const Transform3<double>& tf2, Vector3<double>& p, Vector3<double>& d, Halfspace<double>& s, double& penetration_depth, int& ret); } // namespace detail } // namespace fcl<|fim▁end|>
} //==============================================================================
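The two specializations in the halfspace.cpp row pick a much looser intersection tolerance for single precision (1e-4) than for double (1e-7), reflecting that float32 only resolves about 7 significant decimal digits. A minimal Python rendering of the same per-precision lookup; the function name is hypothetical and not part of the FCL API.

def halfspace_intersect_tolerance(precision):
    # Values taken from the specializations above: float -> 1e-4, double -> 1e-7.
    tolerances = {"float32": 1e-4, "float64": 1e-7}
    return tolerances[precision]

assert halfspace_intersect_tolerance("float32") == 1e-4
assert halfspace_intersect_tolerance("float64") == 1e-7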
<|file_name|>OffHeapStarTreeBuilder.java<|end_file_name|><|fim▁begin|>/** * Copyright (C) 2014-2015 LinkedIn Corp. ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.linkedin.pinot.core.startree; import java.io.BufferedOutputStream; import java.io.DataOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.tuple.Pair; import org.joda.time.DateTime; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Objects; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; import com.linkedin.pinot.common.data.DimensionFieldSpec; import com.linkedin.pinot.common.data.MetricFieldSpec; import com.linkedin.pinot.common.data.FieldSpec.DataType; import com.linkedin.pinot.common.data.Schema; import com.linkedin.pinot.common.utils.Pairs.IntPair; import com.linkedin.pinot.core.data.GenericRow; import com.linkedin.pinot.core.segment.creator.impl.V1Constants; /** * Uses file to build the star tree. Each row is divided into dimension and metrics. Time is added to dimension list. * We use the split order to build the tree. In most cases, split order will be ranked depending on the cardinality (descending order). * Time column will be excluded or last entry in split order irrespective of its cardinality * This is a recursive algorithm where we branch on one dimension at every level. 
* * <b>Psuedo algo</b> * <code> * * build(){ * let table(1,N) consists of N input rows * table.sort(1,N) //sort the table on all dimensions, according to split order * constructTree(table, 0, N, 0); * } * constructTree(table,start,end, level){ * splitDimensionName = dimensionsSplitOrder[level] * groupByResult<dimName, length> = table.groupBy(dimensionsSplitOrder[level]); //returns the number of rows for each value in splitDimension * int rangeStart = 0; * for each ( entry<dimName,length> groupByResult){ * if(entry.length > minThreshold){ * constructTree(table, rangeStart, rangeStart + entry.length, level +1); * } * rangeStart = rangeStart + entry.length; * updateStarTree() //add new child * } * * //create a star tree node * * aggregatedRows = table.uniqueAfterRemovingAttributeAndAggregateMetrics(start,end, splitDimensionName); * for(each row in aggregatedRows_ * table.add(row); * if(aggregateRows.size > minThreshold) { * table.sort(end, end + aggregatedRows.size); * constructStarTree(table, end, end + aggregatedRows.size, level +1); * } * } * </code> */ public class OffHeapStarTreeBuilder implements StarTreeBuilder { private static final Logger LOG = LoggerFactory.getLogger(OffHeapStarTreeBuilder.class); File dataFile; private DataOutputStream dataBuffer; int rawRecordCount = 0; int aggRecordCount = 0; private List<String> dimensionsSplitOrder; private Set<String> skipStarNodeCreationForDimensions; private Set<String> skipMaterializationForDimensions; private int maxLeafRecords; private StarTree starTree; private StarTreeIndexNode starTreeRootIndexNode; private int numDimensions; private int numMetrics; private List<String> dimensionNames; private List<String> metricNames; private String timeColumnName; private List<DataType> dimensionTypes; private List<DataType> metricTypes; private Map<String, Object> dimensionNameToStarValueMap; private HashBiMap<String, Integer> dimensionNameToIndexMap; private Map<String, Integer> metricNameToIndexMap; private int dimensionSizeBytes; private int metricSizeBytes; private File outDir; private Map<String, HashBiMap<Object, Integer>> dictionaryMap; boolean debugMode = false; private int[] sortOrder; private int skipMaterializationCardinalityThreshold; public void init(StarTreeBuilderConfig builderConfig) throws Exception { Schema schema = builderConfig.schema; timeColumnName = schema.getTimeColumnName(); this.dimensionsSplitOrder = builderConfig.dimensionsSplitOrder; skipStarNodeCreationForDimensions = builderConfig.getSkipStarNodeCreationForDimensions(); skipMaterializationForDimensions = builderConfig.getSkipMaterializationForDimensions(); skipMaterializationCardinalityThreshold = builderConfig.getSkipMaterializationCardinalityThreshold(); this.maxLeafRecords = builderConfig.maxLeafRecords; this.outDir = builderConfig.getOutDir(); if (outDir == null) { outDir = new File(System.getProperty("java.io.tmpdir"), V1Constants.STAR_TREE_INDEX_DIR + "_" + DateTime.now()); } LOG.debug("Index output directory:{}", outDir); dimensionTypes = new ArrayList<>(); dimensionNames = new ArrayList<>(); dimensionNameToIndexMap = HashBiMap.create(); dimensionNameToStarValueMap = new HashMap<>(); dictionaryMap = new HashMap<>(); //READ DIMENSIONS COLUMNS List<DimensionFieldSpec> dimensionFieldSpecs = schema.getDimensionFieldSpecs(); for (int index = 0; index < dimensionFieldSpecs.size(); index++) { DimensionFieldSpec spec = dimensionFieldSpecs.get(index); String dimensionName = spec.getName(); dimensionNames.add(dimensionName); dimensionNameToIndexMap.put(dimensionName, 
index); Object starValue; starValue = getAllStarValue(spec); dimensionNameToStarValueMap.put(dimensionName, starValue); dimensionTypes.add(spec.getDataType()); HashBiMap<Object, Integer> dictionary = HashBiMap.create(); dictionaryMap.put(dimensionName, dictionary); } //treat time column as just another dimension, only difference is that we will never split on this dimension unless explicitly specified in split order if (timeColumnName != null) { dimensionNames.add(timeColumnName); dimensionTypes.add(schema.getTimeFieldSpec().getDataType()); int index = dimensionNameToIndexMap.size(); dimensionNameToIndexMap.put(timeColumnName, index); HashBiMap<Object, Integer> dictionary = HashBiMap.create(); dictionaryMap.put(schema.getTimeColumnName(), dictionary); } dimensionSizeBytes = dimensionNames.size() * Integer.SIZE / 8; this.numDimensions = dimensionNames.size(); //READ METRIC COLUMNS this.metricTypes = new ArrayList<>(); this.metricNames = new ArrayList<>(); this.metricNameToIndexMap = new HashMap<>(); this.metricSizeBytes = 0; List<MetricFieldSpec> metricFieldSpecs = schema.getMetricFieldSpecs(); for (int index = 0; index < metricFieldSpecs.size(); index++) { MetricFieldSpec spec = metricFieldSpecs.get(index); String metricName = spec.getName(); metricNames.add(metricName); metricNameToIndexMap.put(metricName, index); DataType dataType = spec.getDataType(); metricTypes.add(dataType); metricSizeBytes += dataType.size(); } this.numMetrics = metricNames.size(); builderConfig.getOutDir().mkdirs(); dataFile = new File(outDir, "star-tree.buf"); dataBuffer = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dataFile))); //INITIALIZE THE ROOT NODE this.starTreeRootIndexNode = new StarTreeIndexNode(); this.starTreeRootIndexNode.setDimensionName(StarTreeIndexNode.all()); this.starTreeRootIndexNode.setDimensionValue(StarTreeIndexNode.all()); this.starTreeRootIndexNode.setLevel(0); LOG.debug("dimensionNames:{}", dimensionNames); LOG.debug("metricNames:{}", metricNames); } /** * Validate the split order by removing any dimensions that may be part of the skip materialization list. 
* @param dimensionsSplitOrder * @param skipMaterializationForDimensions * @return */ private List<String> sanitizeSplitOrder(List<String> dimensionsSplitOrder, Set<String> skipMaterializationForDimensions) { List<String> validatedSplitOrder = new ArrayList<String>(); for (String dimension : dimensionsSplitOrder) { if (skipMaterializationForDimensions == null || !skipMaterializationForDimensions.contains(dimension)) { LOG.info("Adding dimension {} to split order", dimension); validatedSplitOrder.add(dimension); } else { LOG.info( "Dimension {} cannot be part of 'dimensionSplitOrder' and 'skipMaterializationForDimensions', removing it from split order", dimension); } } return validatedSplitOrder; } private Object getAllStarValue(DimensionFieldSpec spec) throws Exception { switch (spec.getDataType()) { case STRING: return "ALL"; case BOOLEAN: case BYTE: case CHAR: case DOUBLE: case FLOAT: case INT: case LONG: return spec.getDefaultNullValue(); case OBJECT: case SHORT: case DOUBLE_ARRAY: case CHAR_ARRAY: case FLOAT_ARRAY: case INT_ARRAY: case LONG_ARRAY: case SHORT_ARRAY: case STRING_ARRAY: case BYTE_ARRAY: default: throw new Exception("Unsupported dimension data type" + spec); } } public GenericRow toGenericRow(DimensionBuffer dimensionKey, MetricBuffer metricsHolder) { GenericRow row = new GenericRow(); Map<String, Object> map = new HashMap<>(); for (int i = 0; i < dimensionNames.size(); i++) { String dimName = dimensionNames.get(i); BiMap<Integer, Object> inverseDictionary = dictionaryMap.get(dimName).inverse(); Object dimValue = inverseDictionary.get(dimensionKey.getDimension(i)); if (dimValue == null) { dimValue = dimensionNameToStarValueMap.get(dimName); } map.put(dimName, dimValue); } for (int i = 0; i < numMetrics; i++) { String metName = metricNames.get(i); map.put(metName, metricsHolder.get(i)); } row.init(map); return row; } public void append(GenericRow row) throws Exception { DimensionBuffer dimension = new DimensionBuffer(numDimensions); for (int i = 0; i < dimensionNames.size(); i++) { String dimName = dimensionNames.get(i); Map<Object, Integer> dictionary = dictionaryMap.get(dimName); Object dimValue = row.getValue(dimName); if (dimValue == null) { //TODO: Have another default value to represent STAR. Using default value to represent STAR as of now. 
//It does not matter during query execution, since we know that values is STAR from the star tree dimValue = dimensionNameToStarValueMap.get(dimName); } if (!dictionary.containsKey(dimValue)) {<|fim▁hole|> Number[] numbers = new Number[numMetrics]; for (int i = 0; i < numMetrics; i++) { String metName = metricNames.get(i); numbers[i] = (Number) row.getValue(metName); } MetricBuffer metrics = new MetricBuffer(numbers); append(dimension, metrics); } public void append(DimensionBuffer dimension, MetricBuffer metrics) throws Exception { appendToRawBuffer(dimension, metrics); } private void appendToRawBuffer(DimensionBuffer dimension, MetricBuffer metrics) throws IOException { appendToBuffer(dataBuffer, dimension, metrics); rawRecordCount++; } private void appendToAggBuffer(DimensionBuffer dimension, MetricBuffer metrics) throws IOException { appendToBuffer(dataBuffer, dimension, metrics); aggRecordCount++; } private void appendToBuffer(DataOutputStream dos, DimensionBuffer dimensions, MetricBuffer metricHolder) throws IOException { for (int i = 0; i < numDimensions; i++) { dos.writeInt(dimensions.getDimension(i)); } dos.write(metricHolder.toBytes(metricSizeBytes, metricTypes)); } public void build() throws Exception { if (skipMaterializationForDimensions == null || skipMaterializationForDimensions.isEmpty()) { skipMaterializationForDimensions = computeDefaultDimensionsToSkipMaterialization(); } if (dimensionsSplitOrder == null || dimensionsSplitOrder.isEmpty()) { dimensionsSplitOrder = computeDefaultSplitOrder(); } // Remove any dimensions from split order that would be not be materialized. dimensionsSplitOrder = sanitizeSplitOrder(dimensionsSplitOrder, skipMaterializationForDimensions); LOG.debug("Split order:{}", dimensionsSplitOrder); long start = System.currentTimeMillis(); dataBuffer.flush(); sort(dataFile, 0, rawRecordCount); constructStarTree(starTreeRootIndexNode, 0, rawRecordCount, 0, dataFile); long end = System.currentTimeMillis(); LOG.debug("Took {} ms to build star tree index. Original records:{} Materialized record:{}", (end - start), rawRecordCount, aggRecordCount); starTree = new StarTree(starTreeRootIndexNode, dimensionNameToIndexMap); File treeBinary = new File(outDir, "star-tree.bin"); LOG.debug("Saving tree binary at: {} ", treeBinary); starTree.writeTree(new BufferedOutputStream(new FileOutputStream(treeBinary))); printTree(starTreeRootIndexNode, 0); LOG.debug("Finished build tree. out dir: {} ", outDir); dataBuffer.close(); } private void printTree(StarTreeIndexNode node, int level) { for (int i = 0; i < level; i++) { LOG.debug(" "); } BiMap<Integer, String> inverse = dimensionNameToIndexMap.inverse(); String dimName = "ALL"; Object dimValue = "ALL"; if (node.getDimensionName() != StarTreeIndexNode.all()) { dimName = inverse.get(node.getDimensionName()); } if (node.getDimensionValue() != StarTreeIndexNode.all()) { dimValue = dictionaryMap.get(dimName).inverse().get(node.getDimensionValue()); } String formattedOutput = Objects.toStringHelper(node).add("nodeId", node.getNodeId()).add("level", level).add("dimensionName", dimName) .add("dimensionValue", dimValue).add("childDimensionName", inverse.get(node.getChildDimensionName())) .add("childCount", node.getChildren() == null ? 
0 : node.getChildren().size()) .add("startDocumentId", node.getStartDocumentId()).add("endDocumentId", node.getEndDocumentId()) .add("documentCount", (node.getEndDocumentId() - node.getStartDocumentId())).toString(); LOG.debug(formattedOutput); if (!node.isLeaf()) { for (StarTreeIndexNode child : node.getChildren().values()) { printTree(child, level + 1); } } } private List<String> computeDefaultSplitOrder() { ArrayList<String> defaultSplitOrder = new ArrayList<>(); //include only the dimensions not time column. Also, assumes that skipMaterializationForDimensions is built. for (String dimensionName : dimensionNames) { if (skipMaterializationForDimensions != null && !skipMaterializationForDimensions.contains(dimensionName)) { defaultSplitOrder.add(dimensionName); } } if (timeColumnName != null) { defaultSplitOrder.remove(timeColumnName); } Collections.sort(defaultSplitOrder, new Comparator<String>() { @Override public int compare(String o1, String o2) { return dictionaryMap.get(o2).size() - dictionaryMap.get(o1).size(); //descending } }); return defaultSplitOrder; } private Set<String> computeDefaultDimensionsToSkipMaterialization() { Set<String> skipDimensions = new HashSet<String>(); for (String dimensionName : dimensionNames) { if (dictionaryMap.get(dimensionName).size() > skipMaterializationCardinalityThreshold) { skipDimensions.add(dimensionName); } } return skipDimensions; } /* * Sorts the file on all dimensions */ private void sort(File file, int startDocId, int endDocId) throws IOException { if (debugMode) { LOG.info("BEFORE SORTING"); printFile(file, startDocId, endDocId); } StarTreeDataTable dataSorter = new StarTreeDataTable(file, dimensionSizeBytes, metricSizeBytes, getSortOrder()); dataSorter.sort(startDocId, endDocId, 0, dimensionSizeBytes); if (debugMode) { LOG.info("AFTER SORTING"); printFile(file, startDocId, endDocId); } } private int[] getSortOrder() { if (sortOrder == null) { sortOrder = new int[dimensionNames.size()]; for (int i = 0; i < dimensionsSplitOrder.size(); i++) { sortOrder[i] = dimensionNameToIndexMap.get(dimensionsSplitOrder.get(i)); } //add remaining dimensions that were not part of dimensionsSplitOrder int counter = 0; for (String dimName : dimensionNames) { if (!dimensionsSplitOrder.contains(dimName)) { sortOrder[dimensionsSplitOrder.size() + counter] = dimensionNameToIndexMap.get(dimName); counter = counter + 1; } } } return sortOrder; } private void printFile(File file, int startDocId, int endDocId) throws IOException { LOG.info("Contents of file:{} from:{} to:{}", file.getName(), startDocId, endDocId); StarTreeDataTable dataSorter = new StarTreeDataTable(file, dimensionSizeBytes, metricSizeBytes, getSortOrder()); Iterator<Pair<byte[], byte[]>> iterator = dataSorter.iterator(startDocId, endDocId); int numRecordsToPrint = 100; int counter = 0; while (iterator.hasNext()) { Pair<byte[], byte[]> next = iterator.next(); LOG.info("{}, {}", DimensionBuffer.fromBytes(next.getLeft()), MetricBuffer.fromBytes(next.getRight(), metricTypes)); if (counter++ == numRecordsToPrint) { break; } } } private int constructStarTree(StarTreeIndexNode node, int startDocId, int endDocId, int level, File file) throws Exception { //node.setStartDocumentId(startDocId); int docsAdded = 0; if (level == dimensionsSplitOrder.size() - 1) { return 0; } String splitDimensionName = dimensionsSplitOrder.get(level); Integer splitDimensionId = dimensionNameToIndexMap.get(splitDimensionName); LOG.debug("Building tree at level:{} using file:{} from startDoc:{} endDocId:{} splitting on 
dimension:{}", level, file.getName(), startDocId, endDocId, splitDimensionName); Map<Integer, IntPair> sortGroupBy = groupBy(startDocId, endDocId, splitDimensionId, file); LOG.debug("Group stats:{}", sortGroupBy); node.setChildDimensionName(splitDimensionId); node.setChildren(new HashMap<Integer, StarTreeIndexNode>()); for (int childDimensionValue : sortGroupBy.keySet()) { StarTreeIndexNode child = new StarTreeIndexNode(); child.setDimensionName(splitDimensionId); child.setDimensionValue(childDimensionValue); child.setParent(node); child.setLevel(node.getLevel() + 1); // n.b. We will number the nodes later using BFS after fully split // Add child to parent node.getChildren().put(childDimensionValue, child); int childDocs = 0; IntPair range = sortGroupBy.get(childDimensionValue); if (range.getRight() - range.getLeft() > maxLeafRecords) { childDocs = constructStarTree(child, range.getLeft(), range.getRight(), level + 1, file); docsAdded += childDocs; } // Either range <= maxLeafRecords, or we did not split further (last level). if (childDocs == 0) { child.setStartDocumentId(range.getLeft()); child.setEndDocumentId(range.getRight()); } } // Return if star node does not need to be created. if (skipStarNodeCreationForDimensions != null && skipStarNodeCreationForDimensions.contains(splitDimensionName)) { return docsAdded; } //create star node StarTreeIndexNode starChild = new StarTreeIndexNode(); starChild.setDimensionName(splitDimensionId); starChild.setDimensionValue(StarTreeIndexNode.all()); starChild.setParent(node); starChild.setLevel(node.getLevel() + 1); // n.b. We will number the nodes later using BFS after fully split // Add child to parent node.getChildren().put(StarTreeIndexNode.all(), starChild); Iterator<Pair<DimensionBuffer, MetricBuffer>> iterator = uniqueCombinations(startDocId, endDocId, file, splitDimensionId); int rowsAdded = 0; int startOffset = rawRecordCount + aggRecordCount; while (iterator.hasNext()) { Pair<DimensionBuffer, MetricBuffer> next = iterator.next(); DimensionBuffer dimension = next.getLeft(); MetricBuffer metricsHolder = next.getRight(); LOG.debug("Adding row:{}", dimension); appendToAggBuffer(dimension, metricsHolder); rowsAdded++; } docsAdded += rowsAdded; LOG.debug("Added {} additional records at level {}", rowsAdded, level); //flush dataBuffer.flush(); int childDocs = 0; if (rowsAdded >= maxLeafRecords) { sort(dataFile, startOffset, startOffset + rowsAdded); childDocs = constructStarTree(starChild, startOffset, startOffset + rowsAdded, level + 1, dataFile); docsAdded += childDocs; } // Either rowsAdded < maxLeafRecords, or we did not split further (last level). if (childDocs == 0) { starChild.setStartDocumentId(startOffset); starChild.setEndDocumentId(startOffset + rowsAdded); } //node.setEndDocumentId(endDocId + docsAdded); return docsAdded; } /** * Assumes the file is already sorted, returns the unique combinations after removing a specified dimension. 
* Aggregates the metrics for each unique combination, currently only sum is supported by default * @param startDocId * @param endDocId * @param file * @param splitDimensionId * @return * @throws Exception */ private Iterator<Pair<DimensionBuffer, MetricBuffer>> uniqueCombinations(int startDocId, int endDocId, File file, int splitDimensionId) throws Exception { StarTreeDataTable dataSorter = new StarTreeDataTable(file, dimensionSizeBytes, metricSizeBytes, getSortOrder()); Iterator<Pair<byte[], byte[]>> iterator1 = dataSorter.iterator(startDocId, endDocId); File tempFile = new File(outDir, file.getName() + "_" + startDocId + "_" + endDocId + ".unique.tmp"); DataOutputStream dos = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(tempFile))); while (iterator1.hasNext()) { Pair<byte[], byte[]> next = iterator1.next(); byte[] dimensionBuffer = next.getLeft(); byte[] metricBuffer = next.getRight(); DimensionBuffer dimensions = DimensionBuffer.fromBytes(dimensionBuffer); for (int i = 0; i < numDimensions; i++) { String dimensionName = dimensionNameToIndexMap.inverse().get(i); if (i == splitDimensionId || (skipMaterializationForDimensions != null && skipMaterializationForDimensions.contains(dimensionName))) { dos.writeInt(StarTreeIndexNode.all()); } else { dos.writeInt(dimensions.getDimension(i)); } } dos.write(metricBuffer); } dos.close(); dataSorter = new StarTreeDataTable(tempFile, dimensionSizeBytes, metricSizeBytes, getSortOrder()); dataSorter.sort(0, endDocId - startDocId); if (debugMode) { printFile(tempFile, 0, endDocId - startDocId); } final Iterator<Pair<byte[], byte[]>> iterator = dataSorter.iterator(0, endDocId - startDocId); return new Iterator<Pair<DimensionBuffer, MetricBuffer>>() { Pair<DimensionBuffer, MetricBuffer> prev = null; boolean done = false; @Override public void remove() { throw new UnsupportedOperationException(); } @Override public boolean hasNext() { return !done; } @Override public Pair<DimensionBuffer, MetricBuffer> next() { while (iterator.hasNext()) { Pair<byte[], byte[]> next = iterator.next(); byte[] dimBuffer = next.getLeft(); byte[] metricBuffer = next.getRight(); if (prev == null) { prev = Pair.of(DimensionBuffer.fromBytes(dimBuffer), MetricBuffer.fromBytes(metricBuffer, metricTypes)); } else { Pair<DimensionBuffer, MetricBuffer> current = Pair.of(DimensionBuffer.fromBytes(dimBuffer), MetricBuffer.fromBytes(metricBuffer, metricTypes)); if (!current.getLeft().equals(prev.getLeft())) { Pair<DimensionBuffer, MetricBuffer> ret = prev; prev = current; LOG.debug("Returning unique {}", prev.getLeft()); return ret; } else { prev.getRight().aggregate(current.getRight(), metricTypes); } } } done = true; LOG.debug("Returning unique {}", prev.getLeft()); return prev; } }; } /** * sorts the file from start to end on a dimension index * @param startDocId * @param endDocId * @param dimension * @param file * @return */ private Map<Integer, IntPair> groupBy(int startDocId, int endDocId, Integer dimension, File file) { StarTreeDataTable dataSorter = new StarTreeDataTable(file, dimensionSizeBytes, metricSizeBytes, getSortOrder()); return dataSorter.groupByIntColumnCount(startDocId, endDocId, dimension); } /** * Iterator to iterate over the records from startDocId to endDocId */ @Override public Iterator<GenericRow> iterator(final int startDocId, final int endDocId) throws Exception { StarTreeDataTable dataSorter = new StarTreeDataTable(dataFile, dimensionSizeBytes, metricSizeBytes, getSortOrder()); final Iterator<Pair<byte[], byte[]>> iterator = 
dataSorter.iterator(startDocId, endDocId); return new Iterator<GenericRow>() { @Override public boolean hasNext() { return iterator.hasNext(); } @Override public void remove() { throw new UnsupportedOperationException(); } @Override public GenericRow next() { Pair<byte[], byte[]> pair = iterator.next(); DimensionBuffer dimensionKey = DimensionBuffer.fromBytes(pair.getLeft()); MetricBuffer metricsHolder = MetricBuffer.fromBytes(pair.getRight(), metricTypes); return toGenericRow(dimensionKey, metricsHolder); } }; } public JSONObject getStarTreeAsJSON() throws Exception { JSONObject json = new JSONObject(); toJson(json, starTreeRootIndexNode, dictionaryMap); return json; } private void toJson(JSONObject json, StarTreeIndexNode node, Map<String, HashBiMap<Object, Integer>> dictionaryMap) throws Exception { String dimName = "ALL"; Object dimValue = "ALL"; if (node.getDimensionName() != StarTreeIndexNode.all()) { dimName = dimensionNames.get(node.getDimensionName()); } if (node.getDimensionValue() != StarTreeIndexNode.all()) { dimValue = dictionaryMap.get(dimName).inverse().get(node.getDimensionValue()); } json.put("title", dimName + ":" + dimValue); if (node.getChildren() != null) { JSONObject[] childJsons = new JSONObject[node.getChildren().size()]; int index = 0; for (Integer child : node.getChildren().keySet()) { StarTreeIndexNode childNode = node.getChildren().get(child); JSONObject childJson = new JSONObject(); toJson(childJson, childNode, dictionaryMap); childJsons[index++] = childJson; } json.put("nodes", childJsons); } } @Override public void cleanup() { if (outDir != null) { FileUtils.deleteQuietly(outDir); } } @Override public StarTree getTree() { return starTree; } @Override public int getTotalRawDocumentCount() { return rawRecordCount; } @Override public int getTotalAggregateDocumentCount() { return aggRecordCount; } @Override public int getMaxLeafRecords() { return maxLeafRecords; } @Override public List<String> getDimensionsSplitOrder() { return dimensionsSplitOrder; } public Map<String, HashBiMap<Object, Integer>> getDictionaryMap() { return dictionaryMap; } public HashBiMap<String, Integer> getDimensionNameToIndexMap() { return dimensionNameToIndexMap; } @Override public Set<String> getSkipMaterializationForDimensions() { return skipMaterializationForDimensions; } }<|fim▁end|>
dictionary.put(dimValue, dictionary.size()); } dimension.setDimension(i, dictionary.get(dimValue)); }
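Editor's note: each record in this dump pairs a prompt — file-name header, prefix, <|fim▁hole|>, suffix, <|fim▁end|> — with the completion text that belongs in the hole, as in the Java record that ends just above. The sketch below shows how such a pair can be spliced back into the original source; the marker strings are copied from the records themselves, while the function name and the sample strings in main() are made up for illustration.

// Rebuilds the original file text from one (prompt, completion) record.
// Marker strings are taken from the records in this dump; the sample
// inputs in main() are hypothetical.
fn splice_fim(prompt: &str, completion: &str) -> Option<String> {
    const BEGIN: &str = "<|fim▁begin|>";
    const HOLE: &str = "<|fim▁hole|>";
    const END: &str = "<|fim▁end|>";

    // Drop the <|file_name|>...<|end_file_name|> header and the begin marker.
    let body = prompt.split(BEGIN).nth(1)?;
    // Text before the hole is the prefix; text after it (minus the trailing
    // end marker) is the suffix; the completion fills the gap.
    let (prefix, rest) = body.split_once(HOLE)?;
    let suffix = rest.strip_suffix(END).unwrap_or(rest);
    Some(format!("{}{}{}", prefix, completion, suffix))
}

fn main() {
    let prompt = "<|file_name|>hello.rs<|end_file_name|><|fim▁begin|>fn main() {<|fim▁hole|>\n}<|fim▁end|>";
    let completion = "\n    println!(\"hello\");";
    assert_eq!(
        splice_fim(prompt, completion).unwrap(),
        "fn main() {\n    println!(\"hello\");\n}"
    );
}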
<|file_name|>requireAfterTeardown.test.ts<|end_file_name|><|fim▁begin|>/** * Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree.<|fim▁hole|>import runJest from '../runJest'; test('prints useful error for requires after test is done', () => { const {stderr} = runJest('require-after-teardown'); const interestingLines = stderr .split('\n') .slice(9, 18) .join('\n'); expect(wrap(interestingLines)).toMatchSnapshot(); expect(stderr.split('\n')[19]).toMatch( new RegExp('(__tests__/lateRequire.test.js:11:20)'), ); });<|fim▁end|>
*/ import {wrap} from 'jest-snapshot-serializer-raw';
<|file_name|>channels.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ################################################## ## DEPENDENCIES import sys import os import os.path try: import builtins as builtin except ImportError: import __builtin__ as builtin from os.path import getmtime, exists import time import types from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple from Cheetah.Template import Template from Cheetah.DummyTransaction import * from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList from Cheetah.CacheRegion import CacheRegion import Cheetah.Filters as Filters import Cheetah.ErrorCatchers as ErrorCatchers from Plugins.Extensions.OpenWebif.local import tstrings ################################################## ## MODULE CONSTANTS VFFSL=valueFromFrameOrSearchList VFSL=valueFromSearchList VFN=valueForName currentTime=time.time __CHEETAH_version__ = '2.4.4' __CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0) __CHEETAH_genTime__ = 1447321436.394491 __CHEETAH_genTimestamp__ = 'Thu Nov 12 18:43:56 2015' __CHEETAH_src__ = '/home/knuth/openpli-oe-core/build/tmp/work/fusionhd-oe-linux/enigma2-plugin-extensions-openwebif/1+gitAUTOINC+5837c87afc-r0/git/plugin/controllers/views/mobile/channels.tmpl' __CHEETAH_srcLastModified__ = 'Thu Nov 12 18:43:41 2015' __CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine' if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple: raise AssertionError( 'This template was compiled with Cheetah version' ' %s. Templates compiled before version %s must be recompiled.'%( __CHEETAH_version__, RequiredCheetahVersion)) ################################################## ## CLASSES class channels(Template): ################################################## ## CHEETAH GENERATED METHODS def __init__(self, *args, **KWs): super(channels, self).__init__(*args, **KWs) if not self._CHEETAH__instanceInitialized: cheetahKWArgs = {} allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split() for k,v in KWs.items(): if k in allowedKWs: cheetahKWArgs[k] = v self._initCheetahInstance(**cheetahKWArgs) def respond(self, trans=None): ## CHEETAH: main method generated for this template if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)): trans = self.transaction # is None unless self.awake() was called if not trans: trans = DummyTransaction() _dummyTrans = True else: _dummyTrans = False write = trans.response().write SL = self._CHEETAH__searchList _filter = self._CHEETAH__currentFilter ######################################## ## START - generated method body write(u'''<html>\r <head>\r \t<title>OpenWebif</title>\r \t<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />\r \t<meta name="viewport" content="user-scalable=no, width=device-width"/>\r \t<meta name="apple-mobile-web-app-capable" content="yes" />\r \t<link rel="stylesheet" type="text/css" href="/css/jquery.mobile-1.0.min.css" media="screen"/>\r \t<link rel="stylesheet" type="text/css" href="/css/iphone.css" media="screen"/>\r \t<script src="/js/jquery-1.6.2.min.js"></script>\r \t<script src="/js/jquery.mobile-1.0.min.js"></script>\r </head>\r <body> \r \t<div data-role="page">\r \r \t\t<div id="header">\r \t\t\t<div class="button" onClick="history.back()">''') _v = VFFSL(SL,"tstrings",True)['back'] # u"$tstrings['back']" on line 17, col 49 if _v is not None: 
write(_filter(_v, rawExpr=u"$tstrings['back']")) # from line 17, col 49. write(u'''</div>\r \t\t\t<h1><a style="color:#FFF;text-decoration:none;" href=\'/mobile\'>OpenWebif</a></h1> \t\t</div>\r \r \t\t<div id="contentContainer">\r \t\t\t<ul data-role="listview" data-inset="true" data-theme="d">\r \t\t\t\t<li data-role="list-divider" role="heading" data-theme="b">''') _v = VFFSL(SL,"tstrings",True)['channels'] # u"$tstrings['channels']" on line 23, col 64 if _v is not None: write(_filter(_v, rawExpr=u"$tstrings['channels']")) # from line 23, col 64. write(u'''</li>\r ''') for channel in VFFSL(SL,"channels",True): # generated from line 24, col 5 write(u'''\t\t\t\t<li>\r \t\t\t\t<a href="/mobile/channelinfo?sref=''') _v = VFFSL(SL,"channel.ref",True) # u'$channel.ref' on line 26, col 39 if _v is not None: write(_filter(_v, rawExpr=u'$channel.ref')) # from line 26, col 39. write(u'''" style="padding: 3px;">\r \t\t\t\t<span class="ui-li-heading" style="margin-top: 0px; margin-bottom: 3px;">''') _v = VFFSL(SL,"channel.name",True) # u'$channel.name' on line 27, col 78 if _v is not None: write(_filter(_v, rawExpr=u'$channel.name')) # from line 27, col 78. write(u'''</span>\r ''') if VFN(VFFSL(SL,"channel",True),"has_key",False)('now_title'): # generated from line 28, col 5 write(u'''\t\t\t\t<span class="ui-li-desc" style="margin-bottom: 0px;">''') _v = VFFSL(SL,"channel.now_title",True) # u'$channel.now_title' on line 29, col 58 if _v is not None: write(_filter(_v, rawExpr=u'$channel.now_title')) # from line 29, col 58. write(u'''</span>\r ''') write(u'''\t\t\t\t</a>\r \t\t\t\t</li>\r ''') write(u'''\t\t\t</ul>\r \t\t</div>\r \r \t\t<div id="footer">\r \t\t\t<p>OpenWebif Mobile</p>\r<|fim▁hole|>\t\t\t<a onclick="document.location.href=\'/index?mode=fullpage\';return false;" href="#">''') _v = VFFSL(SL,"tstrings",True)['show_full_openwebif'] # u"$tstrings['show_full_openwebif']" on line 39, col 86 if _v is not None: write(_filter(_v, rawExpr=u"$tstrings['show_full_openwebif']")) # from line 39, col 86. write(u'''</a>\r \t\t</div>\r \t\t\r \t</div>\r </body>\r </html>\r ''') ######################################## ## END - generated method body return _dummyTrans and trans.response().getvalue() or "" ################################################## ## CHEETAH GENERATED ATTRIBUTES _CHEETAH__instanceInitialized = False _CHEETAH_version = __CHEETAH_version__ _CHEETAH_versionTuple = __CHEETAH_versionTuple__ _CHEETAH_genTime = __CHEETAH_genTime__ _CHEETAH_genTimestamp = __CHEETAH_genTimestamp__ _CHEETAH_src = __CHEETAH_src__ _CHEETAH_srcLastModified = __CHEETAH_srcLastModified__ _mainCheetahMethod_for_channels= 'respond' ## END CLASS DEFINITION if not hasattr(channels, '_initCheetahAttributes'): templateAPIClass = getattr(channels, '_CHEETAH_templateClass', Template) templateAPIClass._addCheetahPlumbingCodeToClass(channels) # CHEETAH was developed by Tavis Rudd and Mike Orr # with code, advice and input from many other volunteers. # For more information visit http://www.CheetahTemplate.org/ ################################################## ## if run from command line: if __name__ == '__main__': from Cheetah.TemplateCmdLineIface import CmdLineIface CmdLineIface(templateObj=channels()).run()<|fim▁end|>
<|file_name|>vtable.rs<|end_file_name|><|fim▁begin|>/* * Copyright 2018 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use crate::endian_scalar::read_scalar_at; use crate::follow::Follow; use crate::primitives::*; /// VTable encapsulates read-only usage of a vtable. It is only to be used /// by generated code. #[derive(Debug)] pub struct VTable<'a> { buf: &'a [u8], loc: usize, } impl<'a> PartialEq for VTable<'a> { fn eq(&self, other: &VTable) -> bool { self.as_bytes().eq(other.as_bytes()) } } impl<'a> VTable<'a> { pub fn init(buf: &'a [u8], loc: usize) -> Self { VTable { buf, loc } } pub fn num_fields(&self) -> usize { (self.num_bytes() / SIZE_VOFFSET) - 2 } pub fn num_bytes(&self) -> usize { unsafe { read_scalar_at::<VOffsetT>(self.buf, self.loc) as usize } } pub fn object_inline_num_bytes(&self) -> usize { let n = unsafe { read_scalar_at::<VOffsetT>(self.buf, self.loc + SIZE_VOFFSET) }; n as usize } pub fn get_field(&self, idx: usize) -> VOffsetT { // TODO(rw): distinguish between None and 0? if idx > self.num_fields() { return 0; } unsafe {<|fim▁hole|> ) } } pub fn get(&self, byte_loc: VOffsetT) -> VOffsetT { // TODO(rw): distinguish between None and 0? if byte_loc as usize >= self.num_bytes() { return 0; } unsafe { read_scalar_at::<VOffsetT>(self.buf, self.loc + byte_loc as usize) } } pub fn as_bytes(&self) -> &[u8] { let len = self.num_bytes(); &self.buf[self.loc..self.loc + len] } } #[allow(dead_code)] pub fn field_index_to_field_offset(field_id: VOffsetT) -> VOffsetT { // Should correspond to what end_table() below builds up. let fixed_fields = 2; // Vtable size and Object Size. ((field_id + fixed_fields) * (SIZE_VOFFSET as VOffsetT)) as VOffsetT } #[allow(dead_code)] pub fn field_offset_to_field_index(field_o: VOffsetT) -> VOffsetT { debug_assert!(field_o >= 2); let fixed_fields = 2; // VTable size and Object Size. (field_o / (SIZE_VOFFSET as VOffsetT)) - fixed_fields } impl<'a> Follow<'a> for VTable<'a> { type Inner = VTable<'a>; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { VTable::init(buf, loc) } }<|fim▁end|>
read_scalar_at::<VOffsetT>( self.buf, self.loc + SIZE_VOFFSET + SIZE_VOFFSET + SIZE_VOFFSET * idx,
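The vtable.rs record above defines the FlatBuffers vtable layout helpers, and its completion supplies the read_scalar_at call that reads a field's offset past the two leading VOffsetT header entries. Below is a small stand-alone sketch of that index/offset arithmetic; it mirrors the two helpers shown in the record, but SIZE_VOFFSET = 2 (a u16 VOffsetT) is an assumption, not something spelled out in the excerpt.

// Mirrors field_index_to_field_offset / field_offset_to_field_index from the
// record above. The two skipped slots hold the vtable size and the object's
// inline size; SIZE_VOFFSET = 2 is assumed here.
type VOffsetT = u16;
const SIZE_VOFFSET: VOffsetT = 2;
const FIXED_FIELDS: VOffsetT = 2;

fn field_index_to_field_offset(field_id: VOffsetT) -> VOffsetT {
    (field_id + FIXED_FIELDS) * SIZE_VOFFSET
}

fn field_offset_to_field_index(field_o: VOffsetT) -> VOffsetT {
    debug_assert!(field_o >= 2);
    field_o / SIZE_VOFFSET - FIXED_FIELDS
}

fn main() {
    for id in 0..4 {
        let offset = field_index_to_field_offset(id);
        assert_eq!(field_offset_to_field_index(offset), id);
        println!("field {} -> vtable byte offset {}", id, offset);
    }
}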
<|file_name|>book-store.rs<|end_file_name|><|fim▁begin|>//! Tests for book-store //! //! Generated by [script][script] using [canonical data][canonical-data] //! //! [script]: https://github.com/exercism/rust/blob/master/bin/init_exercise.py //! [canonical-data]: https://raw.githubusercontent.com/exercism/problem-specifications/master/exercises/book-store/canonical_data.json extern crate book_store; use book_store::*; /// Process a single test case for the property `total` /// /// All cases for the `total` property are implemented /// in terms of this function. /// /// Expected input format: ('basket', 'targetgrouping') fn process_total_case(input: (Vec<usize>, Vec<Vec<usize>>), expected: f64) { assert_eq!( lowest_price(&input.0), expected ) } // Return the total basket price after applying the best discount. // Calculate lowest price for a shopping basket containing books only from // a single series. There is no discount advantage for having more than // one copy of any single book in a grouping. #[test]<|fim▁hole|> #[test] #[ignore] /// Two of the same book fn test_two_of_the_same_book() { process_total_case((vec![2, 2], vec![vec![2], vec![2]]), 16.0); } #[test] #[ignore] /// Empty basket fn test_empty_basket() { process_total_case((vec![], vec![]), 0.0); } #[test] #[ignore] /// Two different books fn test_two_different_books() { process_total_case((vec![1, 2], vec![vec![1, 2]]), 15.2); } #[test] #[ignore] /// Three different books fn test_three_different_books() { process_total_case((vec![1, 2, 3], vec![vec![1, 2, 3]]), 21.6); } #[test] #[ignore] /// Four different books fn test_four_different_books() { process_total_case((vec![1, 2, 3, 4], vec![vec![1, 2, 3, 4]]), 25.6); } #[test] #[ignore] /// Five different books fn test_five_different_books() { process_total_case((vec![1, 2, 3, 4, 5], vec![vec![1, 2, 3, 4, 5]]), 30.0); } #[test] #[ignore] /// Two groups of four is cheaper than group of five plus group of three fn test_two_groups_of_four_is_cheaper_than_group_of_five_plus_group_of_three() { process_total_case((vec![1, 1, 2, 2, 3, 3, 4, 5], vec![vec![1, 2, 3, 4], vec![1, 2, 3, 5]]), 51.2); } #[test] #[ignore] /// Group of four plus group of two is cheaper than two groups of three fn test_group_of_four_plus_group_of_two_is_cheaper_than_two_groups_of_three() { process_total_case((vec![1, 1, 2, 2, 3, 4], vec![vec![1, 2, 3, 4], vec![1, 2]]), 40.8); } #[test] #[ignore] /// Two each of first 4 books and 1 copy each of rest fn test_two_each_of_first_4_books_and_1_copy_each_of_rest() { process_total_case((vec![1, 1, 2, 2, 3, 3, 4, 4, 5], vec![vec![1, 2, 3, 4, 5], vec![1, 2, 3, 4]]), 55.6); } #[test] #[ignore] /// Two copies of each book fn test_two_copies_of_each_book() { process_total_case((vec![1, 1, 2, 2, 3, 3, 4, 4, 5, 5], vec![vec![1, 2, 3, 4, 5], vec![1, 2, 3, 4, 5]]), 60.0); } #[test] #[ignore] /// Three copies of first book and 2 each of remaining fn test_three_copies_of_first_book_and_2_each_of_remaining() { process_total_case((vec![1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 1], vec![vec![1, 2, 3, 4, 5], vec![1, 2, 3, 4, 5], vec![1]]), 68.0); } #[test] #[ignore] /// Three each of first 2 books and 2 each of remaining books fn test_three_each_of_first_2_books_and_2_each_of_remaining_books() { process_total_case((vec![1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 1, 2], vec![vec![1, 2, 3, 4, 5], vec![1, 2, 3, 4, 5], vec![1, 2]]), 75.2); } #[test] #[ignore] /// Four groups of four are cheaper than two groups each of five and three fn test_four_groups_of_four_are_cheaper_than_two_groups_each_of_five_and_three() { 
process_total_case((vec![1,1,2,2,3,3,4,5,1,1,2,2,3,3,4,5], vec![vec![1,2,3,4],vec![1,2,3,5],vec![1,2,3,4],vec![1,2,3,5]]), 102.4); }<|fim▁end|>
/// Only a single book fn test_only_a_single_book() { process_total_case((vec![1], vec![vec![1]]), 8.0); }
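The book-store.rs record above contains only the test suite; lowest_price itself is left to the solver. One workable shape for it — an editor's sketch tied to this exercise's price table, not canonical code from the record — is to group greedily and then trade each 5-group/3-group pair for two 4-groups (the "two groups of four are cheaper" cases named in the tests), keeping prices in integer cents so the f64 comparisons stay exact.

use std::collections::HashMap;

// Greedy grouping plus the 5+3 -> 4+4 adjustment exercised by the tests above.
// Prices follow this exercise's table (8.00 per book, 0/5/10/20/25% discounts)
// and are held in integer cents until the end.
pub fn lowest_price(books: &[usize]) -> f64 {
    const GROUP_CENTS: [u64; 6] = [0, 800, 1520, 2160, 2560, 3000];

    let mut counts: HashMap<usize, usize> = HashMap::new();
    for &book in books {
        *counts.entry(book).or_insert(0) += 1;
    }

    // Each greedy group takes one copy of every title that is still left.
    let mut sizes: Vec<usize> = Vec::new();
    while !counts.is_empty() {
        sizes.push(counts.len());
        counts.retain(|_, copies| {
            *copies -= 1;
            *copies > 0
        });
    }

    // A 5-group plus a 3-group costs 40 cents more than two 4-groups.
    let fives = sizes.iter().filter(|&&s| s == 5).count() as u64;
    let threes = sizes.iter().filter(|&&s| s == 3).count() as u64;
    let saving = fives.min(threes) * (GROUP_CENTS[5] + GROUP_CENTS[3] - 2 * GROUP_CENTS[4]);

    let total: u64 = sizes.iter().map(|&s| GROUP_CENTS[s]).sum::<u64>() - saving;
    total as f64 / 100.0
}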
<|file_name|>issue-63832-await-short-temporary-lifetime.rs<|end_file_name|><|fim▁begin|>// check-pass // edition:2018 async fn foo(x: &[Vec<u32>]) -> u32 { 0 } <|fim▁hole|> foo(&[vec![123]]).await; } fn main() { }<|fim▁end|>
async fn bar() {
<|file_name|>oneshot.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /// Oneshot channels/ports /// /// This is the initial flavor of channels/ports used for comm module. This is /// an optimization for the one-use case of a channel. The major optimization of /// this type is to have one and exactly one allocation when the chan/port pair /// is created. /// /// Another possible optimization would be to not use an Arc box because /// in theory we know when the shared packet can be deallocated (no real need /// for the atomic reference counting), but I was having trouble how to destroy /// the data early in a drop of a Port. /// /// # Implementation /// /// Oneshots are implemented around one atomic uint variable. This variable /// indicates both the state of the port/chan but also contains any tasks /// blocked on the port. All atomic operations happen on this one word. /// /// In order to upgrade a oneshot channel, an upgrade is considered a disconnect /// on behalf of the channel side of things (it can be mentally thought of as /// consuming the port). This upgrade is then also stored in the shared packet. /// The one caveat to consider is that when a port sees a disconnected channel /// it must check for data because there is no "data plus upgrade" state. use core::prelude::*; use alloc::boxed::Box; use core::mem; use rustrt::local::Local; use rustrt::task::{Task, BlockedTask}; use atomic; use comm::Receiver; // Various states you can find a port in. static EMPTY: uint = 0; static DATA: uint = 1; static DISCONNECTED: uint = 2; pub struct Packet<T> { // Internal state of the chan/port pair (stores the blocked task as well) state: atomic::AtomicUint, // One-shot data slot location data: Option<T>, // when used for the second time, a oneshot channel must be upgraded, and // this contains the slot for the upgrade upgrade: MyUpgrade<T>, } pub enum Failure<T> { Empty, Disconnected, Upgraded(Receiver<T>), } pub enum UpgradeResult { UpSuccess, UpDisconnected, UpWoke(BlockedTask), } pub enum SelectionResult<T> { SelCanceled(BlockedTask), SelUpgraded(BlockedTask, Receiver<T>), SelSuccess, } enum MyUpgrade<T> { NothingSent, SendUsed, GoUp(Receiver<T>), } impl<T: Send> Packet<T> { pub fn new() -> Packet<T> { Packet { data: None, upgrade: NothingSent, state: atomic::AtomicUint::new(EMPTY), } } pub fn send(&mut self, t: T) -> Result<(), T> { // Sanity check match self.upgrade { NothingSent => {} _ => fail!("sending on a oneshot that's already sent on "), } assert!(self.data.is_none()); self.data = Some(t); self.upgrade = SendUsed; match self.state.swap(DATA, atomic::SeqCst) { // Sent the data, no one was waiting EMPTY => Ok(()), // Couldn't send the data, the port hung up first. Return the data // back up the stack. DISCONNECTED => { Err(self.data.take().unwrap()) } // Not possible, these are one-use channels DATA => unreachable!(), // Anything else means that there was a task waiting on the other // end. We leave the 'DATA' state inside so it'll pick it up on the // other end. 
n => unsafe { let t = BlockedTask::cast_from_uint(n); t.wake().map(|t| t.reawaken()); Ok(()) } } } // Just tests whether this channel has been sent on or not, this is only // safe to use from the sender. pub fn sent(&self) -> bool { match self.upgrade { NothingSent => false, _ => true, } } pub fn recv(&mut self) -> Result<T, Failure<T>> { // Attempt to not block the task (it's a little expensive). If it looks // like we're not empty, then immediately go through to `try_recv`. if self.state.load(atomic::SeqCst) == EMPTY { let t: Box<Task> = Local::take(); t.deschedule(1, |task| { let n = unsafe { task.cast_to_uint() }; match self.state.compare_and_swap(EMPTY, n, atomic::SeqCst) { // Nothing on the channel, we legitimately block EMPTY => Ok(()), // If there's data or it's a disconnected channel, then we // failed the cmpxchg, so we just wake ourselves back up DATA | DISCONNECTED => { unsafe { Err(BlockedTask::cast_from_uint(n)) } } // Only one thread is allowed to sleep on this port _ => unreachable!() } }); } self.try_recv() } pub fn try_recv(&mut self) -> Result<T, Failure<T>> { match self.state.load(atomic::SeqCst) { EMPTY => Err(Empty), // We saw some data on the channel, but the channel can be used // again to send us an upgrade. As a result, we need to re-insert // into the channel that there's no data available (otherwise we'll // just see DATA next time). This is done as a cmpxchg because if // the state changes under our feet we'd rather just see that state<|fim▁hole|> // change. DATA => { self.state.compare_and_swap(DATA, EMPTY, atomic::SeqCst); match self.data.take() { Some(data) => Ok(data), None => unreachable!(), } } // There's no guarantee that we receive before an upgrade happens, // and an upgrade flags the channel as disconnected, so when we see // this we first need to check if there's data available and *then* // we go through and process the upgrade. DISCONNECTED => { match self.data.take() { Some(data) => Ok(data), None => { match mem::replace(&mut self.upgrade, SendUsed) { SendUsed | NothingSent => Err(Disconnected), GoUp(upgrade) => Err(Upgraded(upgrade)) } } } } _ => unreachable!() } } // Returns whether the upgrade was completed. If the upgrade wasn't // completed, then the port couldn't get sent to the other half (it will // never receive it). pub fn upgrade(&mut self, up: Receiver<T>) -> UpgradeResult { let prev = match self.upgrade { NothingSent => NothingSent, SendUsed => SendUsed, _ => fail!("upgrading again"), }; self.upgrade = GoUp(up); match self.state.swap(DISCONNECTED, atomic::SeqCst) { // If the channel is empty or has data on it, then we're good to go. // Senders will check the data before the upgrade (in case we // plastered over the DATA state). DATA | EMPTY => UpSuccess, // If the other end is already disconnected, then we failed the // upgrade. Be sure to trash the port we were given. 
DISCONNECTED => { self.upgrade = prev; UpDisconnected } // If someone's waiting, we gotta wake them up n => UpWoke(unsafe { BlockedTask::cast_from_uint(n) }) } } pub fn drop_chan(&mut self) { match self.state.swap(DISCONNECTED, atomic::SeqCst) { DATA | DISCONNECTED | EMPTY => {} // If someone's waiting, we gotta wake them up n => unsafe { let t = BlockedTask::cast_from_uint(n); t.wake().map(|t| t.reawaken()); } } } pub fn drop_port(&mut self) { match self.state.swap(DISCONNECTED, atomic::SeqCst) { // An empty channel has nothing to do, and a remotely disconnected // channel also has nothing to do b/c we're about to run the drop // glue DISCONNECTED | EMPTY => {} // There's data on the channel, so make sure we destroy it promptly. // This is why not using an arc is a little difficult (need the box // to stay valid while we take the data). DATA => { self.data.take().unwrap(); } // We're the only ones that can block on this port _ => unreachable!() } } //////////////////////////////////////////////////////////////////////////// // select implementation //////////////////////////////////////////////////////////////////////////// // If Ok, the value is whether this port has data, if Err, then the upgraded // port needs to be checked instead of this one. pub fn can_recv(&mut self) -> Result<bool, Receiver<T>> { match self.state.load(atomic::SeqCst) { EMPTY => Ok(false), // Welp, we tried DATA => Ok(true), // we have some un-acquired data DISCONNECTED if self.data.is_some() => Ok(true), // we have data DISCONNECTED => { match mem::replace(&mut self.upgrade, SendUsed) { // The other end sent us an upgrade, so we need to // propagate upwards whether the upgrade can receive // data GoUp(upgrade) => Err(upgrade), // If the other end disconnected without sending an // upgrade, then we have data to receive (the channel is // disconnected). up => { self.upgrade = up; Ok(true) } } } _ => unreachable!(), // we're the "one blocker" } } // Attempts to start selection on this port. This can either succeed, fail // because there is data, or fail because there is an upgrade pending. pub fn start_selection(&mut self, task: BlockedTask) -> SelectionResult<T> { let n = unsafe { task.cast_to_uint() }; match self.state.compare_and_swap(EMPTY, n, atomic::SeqCst) { EMPTY => SelSuccess, DATA => SelCanceled(unsafe { BlockedTask::cast_from_uint(n) }), DISCONNECTED if self.data.is_some() => { SelCanceled(unsafe { BlockedTask::cast_from_uint(n) }) } DISCONNECTED => { match mem::replace(&mut self.upgrade, SendUsed) { // The other end sent us an upgrade, so we need to // propagate upwards whether the upgrade can receive // data GoUp(upgrade) => { SelUpgraded(unsafe { BlockedTask::cast_from_uint(n) }, upgrade) } // If the other end disconnected without sending an // upgrade, then we have data to receive (the channel is // disconnected). up => { self.upgrade = up; SelCanceled(unsafe { BlockedTask::cast_from_uint(n) }) } } } _ => unreachable!(), // we're the "one blocker" } } // Remove a previous selecting task from this port. This ensures that the // blocked task will no longer be visible to any other threads. // // The return value indicates whether there's data on this port. 
pub fn abort_selection(&mut self) -> Result<bool, Receiver<T>> { let state = match self.state.load(atomic::SeqCst) { // Each of these states means that no further activity will happen // with regard to abortion selection s @ EMPTY | s @ DATA | s @ DISCONNECTED => s, // If we've got a blocked task, then use an atomic to gain ownership // of it (may fail) n => self.state.compare_and_swap(n, EMPTY, atomic::SeqCst) }; // Now that we've got ownership of our state, figure out what to do // about it. match state { EMPTY => unreachable!(), // our task used for select was stolen DATA => Ok(true), // If the other end has hung up, then we have complete ownership // of the port. First, check if there was data waiting for us. This // is possible if the other end sent something and then hung up. // // We then need to check to see if there was an upgrade requested, // and if so, the upgraded port needs to have its selection aborted. DISCONNECTED => { if self.data.is_some() { Ok(true) } else { match mem::replace(&mut self.upgrade, SendUsed) { GoUp(port) => Err(port), _ => Ok(true), } } } // We woke ourselves up from select. Assert that the task should be // trashed and returned that we don't have any data. n => { let t = unsafe { BlockedTask::cast_from_uint(n) }; t.trash(); Ok(false) } } } } #[unsafe_destructor] impl<T: Send> Drop for Packet<T> { fn drop(&mut self) { assert_eq!(self.state.load(atomic::SeqCst), DISCONNECTED); } }<|fim▁end|>
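The oneshot.rs record above (an old pre-1.0 libstd internal) packs the whole port/chan protocol into a single atomic word: a few sentinel values, with any other value interpreted as the blocked task's pointer. Below is a rough modern illustration of just the sentinel part of that encoding, written against std::sync::atomic as an editor's sketch — it is not code from the record and omits the blocked-task and upgrade states.

use std::sync::atomic::{AtomicUsize, Ordering};

// Sentinel values, as in the record above; a real implementation would treat
// any other value as a pointer to the parked receiver.
const EMPTY: usize = 0;
const DATA: usize = 1;
const DISCONNECTED: usize = 2;

struct OneshotState {
    state: AtomicUsize,
}

impl OneshotState {
    fn new() -> Self {
        OneshotState { state: AtomicUsize::new(EMPTY) }
    }

    /// Sender side: publish data unless the receiver already hung up.
    fn send(&self) -> Result<(), &'static str> {
        match self.state.swap(DATA, Ordering::SeqCst) {
            EMPTY => Ok(()),
            DISCONNECTED => Err("receiver dropped first"),
            _ => unreachable!("one-use channel: send called twice"),
        }
    }

    /// Receiver side: take the data if it is there, leaving EMPTY behind.
    fn try_recv(&self) -> Option<()> {
        self.state
            .compare_exchange(DATA, EMPTY, Ordering::SeqCst, Ordering::SeqCst)
            .ok()
            .map(|_| ())
    }
}

fn main() {
    let s = OneshotState::new();
    assert!(s.try_recv().is_none());
    s.send().unwrap();
    assert!(s.try_recv().is_some());
}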
<|file_name|>projectdb.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- encoding: utf-8 -*- # vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8: # Author: Binux<[email protected]> # http://binux.me # Created on 2014-07-17 21:06:43 import time import mysql.connector from pyspider.database.base.projectdb import ProjectDB as BaseProjectDB from pyspider.database.basedb import BaseDB from .mysqlbase import MySQLMixin class ProjectDB(MySQLMixin, BaseProjectDB, BaseDB): __tablename__ = 'projectdb' def __init__(self, host='localhost', port=3306, database='projectdb', user='root', passwd=None): self.database_name = database self.conn = mysql.connector.connect(user=user, password=passwd, host=host, port=port, autocommit=True) if database not in [x[0] for x in self._execute('show databases')]: self._execute('CREATE DATABASE %s' % self.escape(database)) self.conn.database = database self._execute('''CREATE TABLE IF NOT EXISTS %s ( `name` varchar(64) PRIMARY KEY, `group` varchar(64), `status` varchar(16), `script` TEXT, `comments` varchar(1024), `rate` float(11, 4), `burst` float(11, 4), `updatetime` double(16, 4) ) ENGINE=MyISAM CHARSET=utf8''' % self.escape(self.__tablename__)) def insert(self, name, obj={}): obj = dict(obj) obj['name'] = name obj['updatetime'] = time.time() return self._insert(**obj) def update(self, name, obj={}, **kwargs): obj = dict(obj) obj.update(kwargs) obj['updatetime'] = time.time() ret = self._update(where="`name` = %s" % self.placeholder, where_values=(name, ), **obj) return ret.rowcount def get_all(self, fields=None): return self._select2dic(what=fields) def get(self, name, fields=None): where = "`name` = %s" % self.placeholder for each in self._select2dic(what=fields, where=where, where_values=(name, )): return each<|fim▁hole|> where = "`name` = %s" % self.placeholder return self._delete(where=where, where_values=(name, )) def check_update(self, timestamp, fields=None): where = "`updatetime` >= %f" % timestamp return self._select2dic(what=fields, where=where)<|fim▁end|>
return None def drop(self, name):
<|file_name|>log_ft_data.cpp<|end_file_name|><|fim▁begin|>/* * log_ft_data.cpp * * Created on: Jul 9, 2010 * Author: dc */ #include <iostream> #include <cstdio> #include <cstdlib> #include <syslog.h> #include <signal.h> #include <unistd.h> #include <native/task.h> #include <native/timer.h> #include <boost/thread.hpp> #include <boost/ref.hpp> #include <boost/tuple/tuple.hpp> #include <barrett/detail/stacktrace.h> #include <barrett/detail/stl_utils.h> #include <barrett/units.h> #include <barrett/log.h> #include <barrett/products/product_manager.h> using namespace barrett; using detail::waitForEnter; BARRETT_UNITS_FIXED_SIZE_TYPEDEFS; typedef boost::tuple<double, cf_type, ct_type> tuple_type; bool g_Going = true; // Global void stopThreads(int sig) { g_Going = false; } void warnOnSwitchToSecondaryMode(int) { syslog(LOG_ERR, "WARNING: Switched out of RealTime. Stack-trace:"); detail::syslog_stacktrace(); std::cerr << "WARNING: Switched out of RealTime. Stack-trace in syslog.\n"; } void ftThreadEntryPoint(bool* going, double T_s, ForceTorqueSensor& fts, log::RealTimeWriter<tuple_type>* lw, int windowSize, int* numSamples, tuple_type* sum) { tuple_type t; rt_task_shadow(new RT_TASK, NULL, 10, 0); rt_task_set_mode(0, T_PRIMARY | T_WARNSW, NULL); rt_task_set_periodic(NULL, TM_NOW, T_s * 1e9); RTIME now = rt_timer_read(); RTIME lastUpdate = now; while (*going) { rt_task_wait_period(NULL); now = rt_timer_read(); fts.update(true); // Do a realtime update (no sleeping while waiting for messages) boost::get<0>(t) = ((double) now - lastUpdate) * 1e-9; boost::get<1>(t) = fts.getForce(); boost::get<2>(t) = fts.getTorque(); if (lw != NULL) { lw->putRecord(t); } else { if (*numSamples == 0) { boost::get<1>(*sum).setZero(); boost::get<2>(*sum).setZero(); } if (*numSamples < windowSize) { boost::get<1>(*sum) += boost::get<1>(t); boost::get<2>(*sum) += boost::get<2>(t); ++(*numSamples); } } lastUpdate = now; } rt_task_set_mode(T_WARNSW, 0, NULL); } void showUsageAndExit(const char* programName) { printf("Usage: %s {-f <fileName> | -a <windowSize>} [<samplePeriodInSeconds>]\n", programName); printf(" -f <fileName> Log data to a file\n"); printf(" -a <windowSize> Print statistics on segments of data\n"); exit(0); } int main(int argc, char** argv) { char* outFile = NULL; double T_s = 0.002; // Default: 500Hz bool fileMode = false; int windowSize = 0; if (argc == 4) { T_s = std::atof(argv[3]); } else if (argc != 3) { showUsageAndExit(argv[0]); } printf("Sample period: %fs\n", T_s); if (strcmp(argv[1], "-f") == 0) { fileMode = true;<|fim▁hole|> fileMode = false; windowSize = atoi(argv[2]); printf("Window size: %d\n", windowSize); } else { showUsageAndExit(argv[0]); } printf("\n"); signal(SIGXCPU, &warnOnSwitchToSecondaryMode); char tmpFile[] = "/tmp/btXXXXXX"; log::RealTimeWriter<tuple_type>* lw = NULL; if (fileMode) { signal(SIGINT, &stopThreads); if (mkstemp(tmpFile) == -1) { printf("ERROR: Couldn't create temporary file!\n"); return 1; } lw = new barrett::log::RealTimeWriter<tuple_type>(tmpFile, T_s); } int numSamples = windowSize, numSets = 0; tuple_type sum; ProductManager pm; if ( !pm.foundForceTorqueSensor() ) { printf("ERROR: No Force-Torque Sensor found!\n"); return 1; } boost::thread ftThread(ftThreadEntryPoint, &g_Going, T_s, boost::ref(*pm.getForceTorqueSensor()), lw, windowSize, &numSamples, &sum); if (fileMode) { printf(">>> Logging data. Press [Ctrl-C] to exit.\n"); } else { printf(">>> Press [Enter] to start a new sample. 
Press [Ctrl-C] to exit.\n\n"); printf("ID,FX,FY,FZ,TX,TY,TZ"); while (g_Going) { waitForEnter(); numSamples = 0; while (numSamples != windowSize) { usleep(100000); } boost::get<1>(sum) /= windowSize; boost::get<2>(sum) /= windowSize; printf("%d,%f,%f,%f,%f,%f,%f", ++numSets, boost::get<1>(sum)[0], boost::get<1>(sum)[1], boost::get<1>(sum)[2], boost::get<2>(sum)[0], boost::get<2>(sum)[1], boost::get<2>(sum)[2]); } } ftThread.join(); printf("\n"); if (fileMode) { delete lw; log::Reader<tuple_type> lr(tmpFile); lr.exportCSV(outFile); printf("Output written to %s.\n", outFile); std::remove(tmpFile); } return 0; }<|fim▁end|>
outFile = argv[2]; printf("Output file: %s\n", outFile); } else if (strcmp(argv[1], "-a") == 0) {
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import csv import pickle import datetime import os import urllib.request, urllib.parse, urllib.error from django.core.cache import cache from django.urls import reverse from django.http import HttpResponseRedirect, HttpResponse, Http404 from django.db.models import Q from django.db.models.aggregates import Max from django.shortcuts import render, get_object_or_404 from django.contrib.auth.decorators import login_required from django.contrib import messages from django.utils.html import mark_safe from django.conf import settings from coredata.models import Member, CourseOffering, Person, Semester, Role from courselib.auth import ForbiddenResponse, NotFoundResponse, is_course_student_by_slug from courselib.auth import is_course_staff_by_slug, requires_course_staff_by_slug from courselib.search import find_member from forum.models import Forum from grades.models import all_activities_filter from grades.models import Activity, NumericActivity, LetterActivity, CalNumericActivity, GradeHistory from grades.models import NumericGrade, LetterGrade from grades.models import CalLetterActivity, ACTIVITY_TYPES, FLAGS from grades.models import neaten_activity_positions from grades.forms import NumericActivityForm, LetterActivityForm, CalNumericActivityForm, MessageForm from grades.forms import ActivityFormEntry, FormulaFormEntry, StudentSearchForm, FORMTYPE from grades.forms import GROUP_STATUS_MAP, CourseConfigForm, CalLetterActivityForm, CutoffForm from grades.formulas import EvalException, activities_dictionary, eval_parse from grades.utils import reorder_course_activities from grades.utils import ORDER_TYPE, FormulaTesterActivityEntry, FakeActivity, FakeEvalActivity from grades.utils import generate_numeric_activity_stat,generate_letter_activity_stat from grades.utils import ValidationError, calculate_numeric_grade, calculate_letter_grade from marking.models import get_group_mark, StudentActivityMark, GroupActivityMark, ActivityComponent from groups.models import GroupMember, add_activity_to_group from quizzes.models import Quiz from submission.models import SubmissionComponent, GroupSubmission, StudentSubmission, SubmissionInfo, select_all_submitted_components, select_all_components from log.models import LogEntry from pages.models import Page, ACL_ROLES from dashboard.models import UserConfig, NewsItem from dashboard.photos import pre_fetch_photos, photo_for_view from discuss import activity as discuss_activity FROMPAGE = {'course': 'course', 'activityinfo': 'activityinfo', 'activityinfo_group' : 'activityinfo_group'} # Only for display purpose. ACTIVITY_TYPE = {'NG': 'Numeric Graded', 'LG': 'Letter Graded', 'CNG': 'Calculated Numeric Graded', 'CLG': 'Calculated Letter Graded'} @login_required def course_info(request, course_slug): if is_course_student_by_slug(request, course_slug): return _course_info_student(request, course_slug) elif is_course_staff_by_slug(request, course_slug): return _course_info_staff(request, course_slug) else: return ForbiddenResponse(request) @requires_course_staff_by_slug def reorder_activity(request, course_slug): """ Ajax way to reorder activity. This ajax view function is called in the course_info page. 
""" course = get_object_or_404(CourseOffering, slug=course_slug) if request.method == 'POST': neaten_activity_positions(course) # find the activities in question id_up = request.POST.get('id_up') id_down = request.POST.get('id_down') if id_up == None or id_down == None: return ForbiddenResponse(request) # swap the position of the two activities activity_up = get_object_or_404(Activity, id=id_up, offering__slug=course_slug) activity_down = get_object_or_404(Activity, id=id_down, offering__slug=course_slug) activity_up.position, activity_down.position = activity_down.position, activity_up.position activity_up.save() activity_down.save() return HttpResponse("Order updated!") return ForbiddenResponse(request) def _course_info_staff(request, course_slug): """ Course front page """ course = get_object_or_404(CourseOffering, slug=course_slug) member = Member.objects.get(offering=course, person__userid=request.user.username, role__in=['INST','TA','APPR']) activities = all_activities_filter(offering=course) any_group = True in [a.group for a in activities] try: forum = Forum.objects.get(offering=course) forum_enabled = forum.enabled except Forum.DoesNotExist: forum_enabled = False # Non Ajax way to reorder activity, please also see reorder_activity view function for ajax way to reorder order = None act = None if 'order' in request.GET: order = request.GET['order'] if 'act' in request.GET: act = request.GET['act'] if order and act: reorder_course_activities(activities, act, order) return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course_slug})) # Todo: is the activity type necessary? activities_info = [] total_percent = 0 for activity in activities: if activity.percent: total_percent += activity.percent if isinstance(activity, NumericActivity): activities_info.append({'activity':activity, 'type':ACTIVITY_TYPE['NG']}) elif isinstance(activity, LetterActivity): activities_info.append({'activity':activity, 'type':ACTIVITY_TYPE['LG']}) if len(activities) == 0: num_pages = Page.objects.filter(offering=course) if num_pages == 0: messages.info(request, "Students won't see this course in their menu on the front page. As soon as some activities or pages have been added, they will see a link to the course info page.") discussion_activity = False if course.discussion: discussion_activity = discuss_activity.recent_activity(member) # advertise combined offering if applicable. 
offer_combined = course.joint_with() and len(activities) == 0 context = {'course': course, 'member': member, 'activities_info': activities_info, 'from_page': FROMPAGE['course'], 'order_type': ORDER_TYPE, 'any_group': any_group, 'total_percent': total_percent, 'discussion_activity': discussion_activity, 'offer_combined': offer_combined, 'forum_enabled': forum_enabled} return render(request, "grades/course_info_staff.html", context) @requires_course_staff_by_slug def course_config(request, course_slug): from forum.models import Forum course = get_object_or_404(CourseOffering, slug=course_slug) try: forum = Forum.objects.get(offering=course) except Forum.DoesNotExist: forum = Forum(offering=course) forum.enabled = False if request.method=="POST": form = CourseConfigForm(request.POST) if form.is_valid(): course.set_url(form.cleaned_data['url']) course.set_taemail(form.cleaned_data['taemail']) #if course.uses_svn(): # course.set_indiv_svn(form.cleaned_data['indiv_svn']) # course.set_instr_rw_svn(form.cleaned_data['instr_rw_svn']) course.set_group_min(form.cleaned_data['group_min']) course.set_group_max(form.cleaned_data['group_max']) course.save() forum.enabled = form.cleaned_data['forum'] forum.identity = form.cleaned_data['forum_identity'] forum.save() messages.success(request, 'Course config updated') #LOG EVENT# l = LogEntry(userid=request.user.username, description=("updated config for %s") % (course), related_object=course) l.save() return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course_slug})) else: form = CourseConfigForm({'url': course.url(), 'taemail': course.taemail(), 'forum': forum.enabled, 'forum_identity': forum.identity, 'indiv_svn': course.indiv_svn(), 'instr_rw_svn': course.instr_rw_svn(), 'group_min': course.group_min(),'group_max': course.group_max()}) context = {'course': course, 'form': form} return render(request, "grades/course_config.html", context) #@requires_course_student_by_slug def _course_info_student(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activities = all_activities_filter(offering=course) activities = [a for a in activities if a.status in ['RLS', 'URLS']] any_group = True in [a.group for a in activities] has_index = bool(Page.objects.filter(offering=course, label="Index", can_read__in=ACL_ROLES['STUD'])) try: forum = Forum.objects.get(offering=course) forum_enabled = forum.enabled except Forum.DoesNotExist: forum_enabled = False activity_data = [] student = Member.objects.get(offering=course, person__userid=request.user.username, role='STUD') for activity in activities: data = {} data['act'] = activity data['grade_display'] = activity.display_grade_student(student.person) activity_data.append(data) discussion_activity = False member = Member.objects.get(offering=course, person__userid=request.user.username, role='STUD') if course.discussion: discussion_activity = discuss_activity.recent_activity(member) context = {'course': course, 'member': student, 'activity_data': activity_data, 'any_group': any_group, 'has_index': has_index, 'from_page': FROMPAGE['course'], 'discussion_activity': discussion_activity, 'forum_enabled': forum_enabled} return render(request, "grades/course_info_student.html", context) @login_required def activity_info_oldurl(request, course_slug, activity_slug, tail): """ Redirect old activity URLs to new (somewhat intelligently: don't redirect if there's no activity there) """ course = get_object_or_404(CourseOffering, slug=course_slug) activity = 
get_object_or_404(Activity, slug=activity_slug, offering=course) act_url = reverse('offering:activity_info', kwargs={'course_slug': course.slug, 'activity_slug': activity.slug}) return HttpResponseRedirect(act_url + tail) @login_required def activity_info(request, course_slug, activity_slug): if is_course_student_by_slug(request, course_slug): return _activity_info_student(request, course_slug, activity_slug) elif is_course_staff_by_slug(request, course_slug): return _activity_info_staff(request, course_slug, activity_slug) else: return ForbiddenResponse(request) def _activity_info_staff(request, course_slug, activity_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activities = all_activities_filter(slug=activity_slug, offering=course) if len(activities) != 1: return NotFoundResponse(request) activity = activities[0] quiz = Quiz.objects.filter(activity=activity).first() # build list of all students and grades students = Member.objects.filter(role="STUD", offering=activity.offering).select_related('person') if activity.is_numeric(): grades_list = activity.numericgrade_set.filter().select_related('member__person', 'activity') else: grades_list = activity.lettergrade_set.filter().select_related('member__person', 'activity') grades = {} for g in grades_list: grades[g.member.person.userid_or_emplid()] = g source_grades = {} if activity.is_calculated() and not activity.is_numeric(): # calculated letter needs source grades too source_list = activity.numeric_activity.numericgrade_set.filter().select_related('member__person', 'activity') for g in source_list: source_grades[g.member.person.userid_or_emplid()] = g # collect group membership info group_membership = {} if activity.group: gms = GroupMember.objects.filter(activity_id=activity.id, confirmed=True).select_related('group', 'student__person', 'group__courseoffering') for gm in gms: group_membership[gm.student.person.userid_or_emplid()] = gm.group # collect submission status sub_comps = [sc.title for sc in SubmissionComponent.objects.filter(activity_id=activity.id, deleted=False)] submitted = {} if activity.group: subs = GroupSubmission.objects.filter(activity_id=activity.id).select_related('group') for s in subs: members = s.group.groupmember_set.filter(activity_id=activity.id) for m in members: submitted[m.student.person.userid_or_emplid()] = True else: subs = StudentSubmission.objects.filter(activity_id=activity.id) for s in subs: submitted[s.member.person.userid_or_emplid()] = True if bool(sub_comps) and not bool(activity.due_date): messages.warning(request, 'Students will not be able to submit: no due date/time is set.') # collect marking status mark_comps = [ac.title for ac in ActivityComponent.objects.filter(numeric_activity_id=activity.id, deleted=False)] marked = {} marks = StudentActivityMark.objects.filter(activity_id=activity.id).select_related('numeric_grade__member__person') for m in marks: marked[m.numeric_grade.member.person.userid_or_emplid()] = True if activity.group: # also collect group marks: attribute to both the group and members marks = GroupActivityMark.objects.filter(activity_id=activity.id).select_related('group') for m in marks: marked[m.group.slug] = True members = m.group.groupmember_set.filter(activity_id=activity.id).select_related('student__person') for m in members: marked[m.student.person.userid_or_emplid()] = True context = {'course': course, 'activity': activity, 'students': students, 'grades': grades, 'source_grades': source_grades, 'activity_view_type': 'individual', 
'group_membership': group_membership, 'from_page': FROMPAGE['activityinfo'], 'sub_comps': sub_comps, 'mark_comps': mark_comps, 'submitted': submitted, 'marked': marked, 'quiz': quiz, } return render(request, 'grades/activity_info.html', context) def _activity_info_student(request, course_slug, activity_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activities = all_activities_filter(slug=activity_slug, offering=course) if len(activities) != 1: return NotFoundResponse(request) activity = activities[0] if activity.status=="INVI": return NotFoundResponse(request) student = Member.objects.get(offering=course, person__userid=request.user.username, role='STUD') grade = (activity.GradeClass).objects.filter(activity_id=activity.id, member=student) if activity.status != "RLS" or not grade: # shouldn't display or nothing in database: create temporary nograde object for the template grade = (activity.GradeClass)(activity_id=activity.id, member=student, flag="NOGR") else: grade = grade[0] # only display summary stats for courses with at least STUD_NUM_TO_DISP_ACTSTAT grades received reason_msg = '' if activity.is_numeric(): activity_stat, reason_msg = generate_numeric_activity_stat(activity, 'STUD') else: activity_stat, reason_msg = generate_letter_activity_stat(activity, 'STUD') try: quiz = activity.quiz completed = quiz.completed(student) incomplete_quiz = not completed reviewable_quiz = completed and (quiz.review != 'none') and (activity.status == 'RLS') except Quiz.DoesNotExist: incomplete_quiz = False reviewable_quiz = False context = {'course': course, 'activity': activity, 'grade': grade, 'activity_stat': activity_stat, 'reason_msg': reason_msg, 'incomplete_quiz': incomplete_quiz, 'reviewable_quiz': reviewable_quiz, } resp = render(request, 'grades/activity_info_student.html', context) resp.allow_gstatic_csp = True return resp @requires_course_staff_by_slug def activity_info_with_groups(request, course_slug, activity_slug): course = get_object_or_404(CourseOffering, slug = course_slug) activities = all_activities_filter(slug=activity_slug, offering=course) if len(activities) != 1: return NotFoundResponse(request) activity = activities[0] if not activity.group: return NotFoundResponse(request) # build list of group grades information all_members = GroupMember.objects.select_related('group', 'student__person', 'group__courseoffering').filter(activity = activity, confirmed = True) groups_found = {} grouped_students = 0 for member in all_members: grouped_students += 1 group = member.group student = member.student if group.id not in groups_found: # a new group discovered by its first member # get the current grade of the group current_mark = get_group_mark(activity, group) value = 'no grade' if current_mark is None else current_mark.mark new_group_grade_info = {'group': group, 'members': [student], 'grade': value} groups_found[group.id] = new_group_grade_info else: # add this member to its corresponding group info group_grade_info = groups_found[group.id] group_grade_info['members'].append(student) ungrouped_students = Member.objects.filter(offering = course, role = 'STUD').count() - grouped_students # collect submission status submitted = {} subs = GroupSubmission.objects.filter(activity_id=activity.id).select_related('group') for s in subs: submitted[s.group.slug] = True if isinstance(activity, NumericActivity): activity_type = ACTIVITY_TYPE['NG'] elif isinstance(activity, LetterActivity): activity_type = ACTIVITY_TYPE['LG'] # more activity info for display sub_comps = 
[sc.title for sc in SubmissionComponent.objects.filter(activity_id=activity.id, deleted=False)] mark_comps = [ac.title for ac in ActivityComponent.objects.filter(numeric_activity_id=activity.id, deleted=False)] context = {'course': course, 'activity_type': activity_type, 'activity': activity, 'ungrouped_students': ungrouped_students, 'activity_view_type': 'group', 'group_grade_info_list': list(groups_found.values()), 'from_page': FROMPAGE['activityinfo_group'], 'sub_comps': sub_comps, 'mark_comps': mark_comps, 'submitted': submitted} return render(request, 'grades/activity_info_with_groups.html', context) @requires_course_staff_by_slug def activity_stat(request, course_slug, activity_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activities = all_activities_filter(slug=activity_slug, offering=course) if len(activities) != 1: return NotFoundResponse(request) activity = activities[0] display_summary = True # always display for staff if activity.is_numeric(): activity_stat, _ = generate_numeric_activity_stat(activity, request.member.role) GradeClass = NumericGrade else: activity_stat, _ = generate_letter_activity_stat(activity, request.member.role) GradeClass = LetterGrade # counts submissions (individual & group) submark_stat = {} submark_stat['submittable'] = bool(SubmissionComponent.objects.filter(activity_id=activity.id)) submark_stat['studentsubmissons'] = len(set((s.member for s in StudentSubmission.objects.filter(activity_id=activity.id)))) submark_stat['groupsubmissons'] = len(set((s.group for s in GroupSubmission.objects.filter(activity_id=activity.id)))) # build counts of how many times each component has been submitted (by unique members/groups) sub_comps = select_all_components(activity) subed_comps = dict(((comp.id, set()) for comp in sub_comps)) # build dictionaries of submisson.id -> owner so we can look up quickly when scanning subid_dict = dict(((s.id, ("s", s.member_id)) for s in StudentSubmission.objects.filter(activity_id=activity.id))) subid_dict.update( dict(((s.id, ("g", s.group_id)) for s in GroupSubmission.objects.filter(activity_id=activity.id))) ) # build sets of who has submitted each SubmissionComponent for sc in select_all_submitted_components(activity_id=activity.id): if sc.component.deleted: # don't report on deleted components continue owner = subid_dict[sc.submission_id] # Add a sanity check to fix corrupt data if sc.component_id in subed_comps: subed_comps[sc.component_id].add(owner) # actual list of components and counts sub_comp_rows = [] for comp in sub_comps: data = {'comp': comp, 'count': len(subed_comps[comp.id])} sub_comp_rows.append(data) submark_stat['studentgrades'] = len(set([s.member for s in GradeClass.objects.filter(activity_id=activity.id)])) if activity.is_numeric(): submark_stat['markable'] = bool(ActivityComponent.objects.filter(numeric_activity_id=activity.id)) submark_stat['studentmarks'] = len(set([s.numeric_grade.member for s in StudentActivityMark.objects.filter(activity_id=activity.id)])) submark_stat['groupmarks'] = len(set([s.group for s in GroupActivityMark.objects.filter(activity_id=activity.id)])) else: submark_stat['markable'] = False context = {'course': course, 'activity': activity, 'activity_stat': activity_stat, 'display_summary': display_summary, 'submark_stat': submark_stat, 'sub_comp_rows': sub_comp_rows} resp = render(request, 'grades/activity_stat.html', context) resp.allow_gstatic_csp = True return resp @requires_course_staff_by_slug def activity_choice(request, course_slug): course = 
get_object_or_404(CourseOffering, slug=course_slug) context = {'course': course} return render(request, 'grades/activity_choice.html', context) @requires_course_staff_by_slug def edit_cutoffs(request, course_slug, activity_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activity = get_object_or_404(CalLetterActivity, slug=activity_slug, offering=course, deleted=False) if request.method == 'POST': form = CutoffForm(request.POST) if form.is_valid(): # All validation rules pass activity.set_cutoffs(form.cleaned_data['cutoffs']) activity.save() if form.cleaned_data['ap'] > activity.numeric_activity.max_grade: messages.warning(request, "Some grade cutoffs are higher than the maximum grade for %s." % (activity.numeric_activity.name)) #LOG EVENT# l = LogEntry(userid=request.user.username, description=("edited %s cutoffs") % (activity), related_object=activity) l.save() messages.success(request, "Grade cutoffs updated.") try: ignored = calculate_letter_grade(course, activity) if ignored == 1: messages.warning(request, "Did not calculate letter grade for 1 manually-graded student.") elif ignored > 1: messages.warning(request, "Did not calculate letter grade for %i manually-graded students." % (ignored)) except ValidationError as e: messages.error(request, e.args[0]) except NotImplementedError: return NotFoundResponse(request) return HttpResponseRedirect(reverse('offering:activity_info', kwargs={'course_slug': course.slug, 'activity_slug': activity.slug})) else: cutoff=activity.get_cutoffs() cutoffsdict=_cutoffsdict(cutoff) form=CutoffForm(cutoffsdict) source_grades = activity.numeric_activity.numericgrade_set.exclude(flag="NOGR") source_grades = '[' + ", ".join(["%.2f" % (g.value) for g in source_grades]) + ']' context = {'course': course, 'activity': activity, 'cutoff':form, 'source_grades': source_grades} resp = render(request, 'grades/edit_cutoffs.html', context) resp.allow_gstatic_csp = True return resp def _cutoffsdict(cutoff): data = dict() data['ap'] = cutoff[0] data['a'] = cutoff[1] data['am'] = cutoff[2] data['bp'] = cutoff[3] data['b'] = cutoff[4] data['bm'] = cutoff[5] data['cp'] = cutoff[6] data['c'] = cutoff[7] data['cm'] = cutoff[8] data['d'] = cutoff[9] return data @requires_course_staff_by_slug def compare_official(request, course_slug, activity_slug): """ Screen to compare member.official_grade to this letter activity """ course = get_object_or_404(CourseOffering, slug=course_slug) activity = get_object_or_404(LetterActivity, slug=activity_slug, offering=course, deleted=False) members = Member.objects.filter(offering=course, role='STUD') grades = dict(((g.member, g.letter_grade)for g in LetterGrade.objects.filter(activity_id=activity.id).exclude(flag='NOGR'))) data = [] for m in members: if m in grades: g = grades[m] else: g = None data.append((m, g, m.official_grade!=g)) #print data context = {'course': course, 'activity': activity, 'data': data} return render(request, 'grades/compare_official.html', context) from dashboard.letters import grade_change_form @requires_course_staff_by_slug def grade_change(request, course_slug, activity_slug, userid): """ Produce grade change form """ course = get_object_or_404(CourseOffering, slug=course_slug) activity = get_object_or_404(LetterActivity, slug=activity_slug, offering=course, deleted=False) member = get_object_or_404(Member, ~Q(role='DROP'), find_member(userid), offering__slug=course_slug) user = Person.objects.get(userid=request.user.username) grades = LetterGrade.objects.filter(activity_id=activity.id, 
member=member).exclude(flag='NOGR') if grades: grade = grades[0].letter_grade else: grade = None response = HttpResponse(content_type="application/pdf") response['Content-Disposition'] = 'inline; filename="%s-gradechange.pdf"' % (userid) grade_change_form(member, member.official_grade, grade, user, response) return response @requires_course_staff_by_slug def add_numeric_activity(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activities_list = [(None, '\u2014'),] activities = all_activities_filter(course) for a in activities: if a.group == True: activities_list.append((a.slug, a.name)) if request.method == 'POST': # If the form has been submitted... form = NumericActivityForm(request.POST, previous_activities=activities_list) # A form bound to the POST data form.activate_addform_validation(course_slug) if form.is_valid(): # All validation rules pass try: aggr_dict = Activity.objects.filter(offering=course).aggregate(Max('position')) if not aggr_dict['position__max']: position = 1 else: position = aggr_dict['position__max'] + 1 config = { 'showstats': form.cleaned_data['showstats'], 'showhisto': form.cleaned_data['showhisto'], 'url': form.cleaned_data['url'], } a = NumericActivity.objects.create(name=form.cleaned_data['name'], short_name=form.cleaned_data['short_name'], status=form.cleaned_data['status'], due_date=form.cleaned_data['due_date'], percent=form.cleaned_data['percent'], max_grade=form.cleaned_data['max_grade'], offering=course, position=position, group=GROUP_STATUS_MAP[form.cleaned_data['group']], config=config) if a.group == True and form.cleaned_data['extend_group'] is not None: a2 = [i for i in activities if i.slug == form.cleaned_data['extend_group']] if len(a2) > 0: add_activity_to_group(a, a2[0], course) #LOG EVENT# l = LogEntry(userid=request.user.username, description=("created a numeric activity %s") % (a), related_object=a) l.save() except NotImplementedError: return NotFoundResponse(request) messages.success(request, 'New activity "%s" added' % a.name) _semester_date_warning(request, a) return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course_slug})) else: messages.error(request, "Please correct the error below") else: form = NumericActivityForm(previous_activities=activities_list) context = {'course': course, 'form': form, 'form_type': FORMTYPE['add']} return render(request, 'grades/numeric_activity_form.html', context) @requires_course_staff_by_slug def add_cal_numeric_activity(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) numeric_activities = NumericActivity.objects.filter(offering=course, deleted=False) if request.method == 'POST': # If the form has been submitted... 
form = CalNumericActivityForm(request.POST) # A form bound to the POST data form.activate_addform_validation(course_slug) if form.is_valid(): # All validation rules pass try: aggr_dict = Activity.objects.filter(offering=course).aggregate(Max('position')) if not aggr_dict['position__max']: position = 1 else: position = aggr_dict['position__max'] + 1 config = { 'showstats': form.cleaned_data['showstats'], 'showhisto': form.cleaned_data['showhisto'], 'calculation_leak': form.cleaned_data['calculation_leak'], 'url': form.cleaned_data['url'], } CalNumericActivity.objects.create(name=form.cleaned_data['name'], short_name=form.cleaned_data['short_name'], status=form.cleaned_data['status'], percent=form.cleaned_data['percent'], max_grade=form.cleaned_data['max_grade'], formula=form.cleaned_data['formula'], offering=course, position=position, group=False, config=config) except NotImplementedError: return NotFoundResponse(request) messages.success(request, 'New activity "%s" added' % form.cleaned_data['name']) return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course_slug})) else: messages.error(request, "Please correct the error below") else: form = CalNumericActivityForm(initial={'formula': '[[activitytotal]]'}) context = {'course': course, 'form': form, 'numeric_activities': numeric_activities, 'form_type': FORMTYPE['add']} resp = render(request, 'grades/cal_numeric_activity_form.html', context) resp.has_inline_script = True # insert activity in formula links return resp @requires_course_staff_by_slug def add_cal_letter_activity(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) letter_activities = LetterActivity.objects.filter(offering=course) numact_choices = [(na.pk, na.name) for na in NumericActivity.objects.filter(offering=course, deleted=False)] examact_choices = [(0, '\u2014')] + [(na.pk, na.name) for na in Activity.objects.filter(offering=course, deleted=False)] if request.method == 'POST': # If the form has been submitted... 
form = CalLetterActivityForm(request.POST) # A form bound to the POST data form.fields['numeric_activity'].choices = numact_choices form.fields['exam_activity'].choices = examact_choices form.activate_addform_validation(course_slug) if form.is_valid(): # All validation rules pass try: aggr_dict = Activity.objects.filter(offering=course).aggregate(Max('position')) if not aggr_dict['position__max']: position = 1 else: position = aggr_dict['position__max'] + 1 if form.cleaned_data['exam_activity'] == '0': exam_activity_id = None else: exam_activity = Activity.objects.get(pk=form.cleaned_data['exam_activity']) exam_activity_id = exam_activity.id config = { 'showstats': form.cleaned_data['showstats'], 'showhisto': form.cleaned_data['showhisto'], 'url': form.cleaned_data['url'], } CalLetterActivity.objects.create(name=form.cleaned_data['name'], short_name=form.cleaned_data['short_name'], status=form.cleaned_data['status'], numeric_activity=NumericActivity.objects.get(pk=form.cleaned_data['numeric_activity']), exam_activity_id=exam_activity_id, offering=course, position=position, group=False, config=config) except NotImplementedError: return NotFoundResponse(request) messages.success(request, 'New activity "%s" added' % form.cleaned_data['name']) return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course_slug})) else: messages.error(request, "Please correct the error below") else: form = CalLetterActivityForm() form.fields['numeric_activity'].choices = numact_choices form.fields['exam_activity'].choices = examact_choices context = {'course': course, 'form': form, 'letter_activities': letter_activities, 'form_type': FORMTYPE['add']} return render(request, 'grades/cal_letter_activity_form.html', context) @requires_course_staff_by_slug def formula_tester(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) numeric_activities = NumericActivity.objects.filter(offering=course, deleted=False) result = "" if 'formula' in request.GET: # If the form has been submitted... 
activity_entries = [] faked_activities = [] # used to evaluate the formula has_error = False for numeric_activity in numeric_activities: activity_form_entry = ActivityFormEntry(request.GET, prefix=numeric_activity.slug) if not activity_form_entry.is_valid(): has_error = True else: value = activity_form_entry.cleaned_data['value'] if not value: value = 0 faked_activities.append(FakeActivity(numeric_activity.name, numeric_activity.short_name, activity_form_entry.cleaned_data['status'], numeric_activity.max_grade, numeric_activity.percent, value)) activity_entries.append(FormulaTesterActivityEntry(numeric_activity, activity_form_entry)) formula_form_entry = FormulaFormEntry(request.GET) formula_form_entry.activate_form_entry_validation(course_slug, None) if not formula_form_entry.is_valid(): has_error = True if has_error: messages.error(request, "Please correct the error below") else: parsed_expr = pickle.loads(formula_form_entry.pickled_formula) act_dict = activities_dictionary(faked_activities) try: result = eval_parse(parsed_expr, FakeEvalActivity(course), act_dict, None, True) except EvalException: messages.error(request, "Can not evaluate formula") else: activity_entries = [] for numeric_activity in numeric_activities: activity_form_entry = ActivityFormEntry(prefix=numeric_activity.slug) activity_entries.append(FormulaTesterActivityEntry(numeric_activity, activity_form_entry)) formula_form_entry = FormulaFormEntry() context = {'course': course, 'activity_entries': activity_entries, 'formula_form_entry': formula_form_entry, 'result': result} return render(request, 'grades/formula_tester.html', context) @requires_course_staff_by_slug def calculate_all(request, course_slug, activity_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activity = get_object_or_404(CalNumericActivity, slug=activity_slug, offering=course, deleted=False) try: ignored, hiding_info = calculate_numeric_grade(course,activity) if hiding_info: messages.warning(request, "This activity is released to students, but the calculation uses unreleased grades. Calculations done with unreleased activities as zero to prevent leaking hidden info to students.") if ignored==1: messages.warning(request, "Did not calculate grade for 1 manually-graded student.") elif ignored>1: messages.warning(request, "Did not calculate grade for %i manually-graded students." % (ignored)) except ValidationError as e: messages.error(request, e.args[0]) except EvalException as e: messages.error(request, e.args[0]) except NotImplementedError: return NotFoundResponse(request) return HttpResponseRedirect(activity.get_absolute_url()) @requires_course_staff_by_slug def calculate_all_lettergrades(request, course_slug, activity_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activity = get_object_or_404(CalLetterActivity, slug=activity_slug, offering=course, deleted=False) try: ignored = calculate_letter_grade(course,activity) if ignored==1: messages.warning(request, "Did not calculate letter grade for 1 manually-graded student.") elif ignored>1: messages.warning(request, "Did not calculate letter grade for %i manually-graded students." % (ignored)) except ValidationError as e: messages.error(request, e.args[0]) except NotImplementedError: return NotFoundResponse(request) return HttpResponseRedirect(activity.get_absolute_url()) @requires_course_staff_by_slug def calculate_individual_ajax(request, course_slug, activity_slug): """ Ajax way to calculate individual numeric grade. 
This ajav view function is called in the activity_info page. """ if request.method == 'POST': userid = request.POST.get('userid') if userid == None: return ForbiddenResponse(request) course = get_object_or_404(CourseOffering, slug=course_slug) activity = get_object_or_404(CalNumericActivity, slug=activity_slug, offering=course, deleted=False) member = get_object_or_404(Member, offering=course, person__userid=userid, role='STUD') try: displayable_result, _ = calculate_numeric_grade(course,activity, member) except ValidationError: return ForbiddenResponse(request) except EvalException: return ForbiddenResponse(request) except NotImplementedError: return ForbiddenResponse(request) return HttpResponse(displayable_result) return ForbiddenResponse(request) def _create_activity_formdatadict(activity): if not [activity for activity_type in ACTIVITY_TYPES if isinstance(activity, activity_type)]: return data = dict() data['name'] = activity.name data['short_name'] = activity.short_name data['status'] = activity.status data['due_date'] = activity.due_date data['percent'] = activity.percent data['url'] = '' if 'url' in activity.config: data['url'] = activity.config['url'] data['showstats'] = True if 'showstats' in activity.config: data['showstats'] = activity.config['showstats'] data['showhisto'] = True if 'showhisto' in activity.config: data['showhisto'] = activity.config['showhisto'] if 'calculation_leak' in activity.config: data['calculation_leak'] = activity.config['calculation_leak'] for (k, v) in list(GROUP_STATUS_MAP.items()): if activity.group == v: data['group'] = k if isinstance(activity, NumericActivity): data['max_grade'] = activity.max_grade if isinstance(activity, CalNumericActivity): data['formula'] = activity.formula if isinstance(activity, CalLetterActivity): data['numeric_activity'] = activity.numeric_activity_id data['exam_activity'] = activity.exam_activity_id return data def _populate_activity_from_formdata(activity, data): if not [activity for activity_type in ACTIVITY_TYPES if isinstance(activity, activity_type)]: return if 'name' in data: activity.name = data['name'] if 'short_name' in data: activity.short_name = data['short_name'] if 'status' in data: activity.status = data['status'] if 'due_date' in data: activity.due_date = data['due_date'] if 'percent' in data: activity.percent = data['percent'] if 'group' in data: activity.group = GROUP_STATUS_MAP[data['group']] if 'max_grade' in data: activity.max_grade = data['max_grade'] if 'formula' in data: activity.formula = data['formula'] if 'url' in data: activity.config['url'] = data['url'] if 'showstats' in data: activity.config['showstats'] = data['showstats'] if 'showhisto' in data: activity.config['showhisto'] = data['showhisto'] if 'calculation_leak' in data: activity.config['calculation_leak'] = data['calculation_leak'] if 'numeric_activity' in data: activity.numeric_activity = NumericActivity.objects.get(pk=data['numeric_activity']) if 'exam_activity' in data: try: activity.exam_activity = Activity.objects.get(pk=data['exam_activity']) except Activity.DoesNotExist: activity.exam_activity = None def _semester_date_warning(request, activity): """ Generate warnings for this request if activity due date is outside semester boundaries. 
""" if not activity.due_date: return # don't warn for 24 hours after the last day of classes (start of last day + 48 hours) if activity.due_date > datetime.datetime.combine( activity.offering.semester.end, datetime.time(0,0,0)) + datetime.timedelta(hours=48): messages.warning(request, "Activity is due after the end of the semester.") if activity.due_date < datetime.datetime.combine( activity.offering.semester.start, datetime.time(0,0,0)): messages.warning(request, "Activity is due before the start of the semester.") @requires_course_staff_by_slug def edit_activity(request, course_slug, activity_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activities = all_activities_filter(slug=activity_slug, offering=course) numact_choices = [(na.pk, na.name) for na in NumericActivity.objects.filter(offering=course, deleted=False)] examact_choices = [(0, '\u2014')] + [(na.pk, na.name) for na in Activity.objects.filter(offering=course, deleted=False)] if (len(activities) == 1): activity = activities[0] # extend group options activities_list = [(None, '\u2014'),] activities = all_activities_filter(offering=course) for a in activities: if a.group == True and a.id != activity.id: activities_list.append((a.slug, a.name)) from_page = request.GET.get('from_page') if request.method == 'POST': # If the form has been submitted... if isinstance(activity, CalNumericActivity): form = CalNumericActivityForm(request.POST) elif isinstance(activity, NumericActivity): form = NumericActivityForm(request.POST, previous_activities=activities_list) elif isinstance(activity, CalLetterActivity): form = CalLetterActivityForm(request.POST) form.fields['numeric_activity'].choices = numact_choices form.fields['exam_activity'].choices = examact_choices elif isinstance(activity, LetterActivity): form = LetterActivityForm(request.POST, previous_activities=activities_list) form.activate_editform_validation(course_slug, activity_slug) if form.is_valid(): # All validation rules pass _populate_activity_from_formdata(activity, form.cleaned_data) if activity.group == True and form.cleaned_data['extend_group'] is not None: a2 = [i for i in activities if i.slug == form.cleaned_data['extend_group']] if len(a2) > 0: add_activity_to_group(activity, a2[0], course) activity.save(entered_by=request.user.username) #LOG EVENT# l = LogEntry(userid=request.user.username, description=("edited %s") % (activity), related_object=activity) l.save() messages.success(request, "Details of %s updated" % activity.name) _semester_date_warning(request, activity) if from_page == FROMPAGE['course']: return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course_slug})) else: return HttpResponseRedirect(reverse('offering:activity_info', kwargs={'course_slug': course_slug, 'activity_slug': activity.slug})) else: messages.error(request, "Please correct the error below") else: datadict = _create_activity_formdatadict(activity) if isinstance(activity, CalNumericActivity): form = CalNumericActivityForm(initial=datadict) elif isinstance(activity, NumericActivity): form = NumericActivityForm(initial=datadict, previous_activities=activities_list) elif isinstance(activity, CalLetterActivity): form = CalLetterActivityForm(initial=datadict) form.fields['numeric_activity'].choices = numact_choices form.fields['exam_activity'].choices = examact_choices # set initial value in form to current value elif isinstance(activity, LetterActivity): form = LetterActivityForm(initial=datadict, previous_activities=activities_list) elif 
isinstance(activity, CalLetterActivity): form = CalLetterActivityForm(initial=datadict) form.fields['numeric_activity'].choices = numact_choices form.fields['exam_activity'].choices = examact_choices form.activate_editform_validation(course_slug, activity_slug) if isinstance(activity, CalNumericActivity): numeric_activities = NumericActivity.objects.exclude(slug=activity_slug).filter(offering=course, deleted=False) context = {'course': course, 'activity': activity, 'form': form, 'numeric_activities': numeric_activities, 'form_type': FORMTYPE['edit'], 'from_page': from_page} resp = render(request, 'grades/cal_numeric_activity_form.html', context) resp.has_inline_script = True # insert activity in formula links return resp elif isinstance(activity, NumericActivity): context = {'course': course, 'activity': activity, 'form': form, 'form_type': FORMTYPE['edit'], 'from_page': from_page} return render(request, 'grades/numeric_activity_form.html', context) elif isinstance(activity, CalLetterActivity): context = {'course': course, 'activity': activity, 'form': form, 'form_type': FORMTYPE['edit'], 'from_page': from_page} return render(request, 'grades/cal_letter_activity_form.html', context) elif isinstance(activity, LetterActivity): context = {'course': course, 'activity': activity, 'form': form, 'form_type': FORMTYPE['edit'], 'from_page': from_page} return render(request, 'grades/letter_activity_form.html', context) else: return NotFoundResponse(request) @requires_course_staff_by_slug def delete_activity(request, course_slug, activity_slug): """ Flag activity as deleted """ course = get_object_or_404(CourseOffering, slug=course_slug) activity = get_object_or_404(Activity, slug=activity_slug, offering=course) if request.method == 'POST': if not Member.objects.filter(offering=course, person__userid=request.user.username, role="INST"): # only instructors can delete return ForbiddenResponse(request, "Only instructors can delete activities") activity.safely_delete() messages.success(request, 'Activity deleted. It can be restored by the system adminstrator in an emergency.') #LOG EVENT# l = LogEntry(userid=request.user.username, description=("activity %s marked deleted") % (activity), related_object=course) l.save() return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course.slug})) else: return ForbiddenResponse(request) @requires_course_staff_by_slug def release_activity(request, course_slug, activity_slug): """ Bump activity status: INVI -> URLS, URLS -> RLS. 
""" course = get_object_or_404(CourseOffering, slug=course_slug) activity = get_object_or_404(Activity, slug=activity_slug, offering=course, deleted=False) if request.method == 'POST': if activity.status == "INVI": activity.status = "URLS" activity.save(entered_by=request.user.username) messages.success(request, 'Activity made visible to students (but grades are still unreleased).') #LOG EVENT# l = LogEntry(userid=request.user.username, description=("activity %s made visible") % (activity), related_object=course) l.save() elif activity.status == "URLS": activity.status = "RLS" activity.save(entered_by=request.user.username) messages.success(request, 'Grades released to students.') #LOG EVENT# l = LogEntry(userid=request.user.username, description=("activity %s grades released") % (activity), related_object=course) l.save() return HttpResponseRedirect(reverse('offering:activity_info', kwargs={'course_slug': course.slug, 'activity_slug': activity.slug})) else: return ForbiddenResponse(request) @requires_course_staff_by_slug def add_letter_activity(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activities_list = [(None, '\u2014'),] activities = all_activities_filter(course) for a in activities: if a.group == True: activities_list.append((a.slug, a.name)) if request.method == 'POST': # If the form has been submitted... form = LetterActivityForm(request.POST, previous_activities=activities_list) # A form bound to the POST data form.activate_addform_validation(course_slug) if form.is_valid(): # All validation rules pass #try: aggr_dict = Activity.objects.filter(offering=course).aggregate(Max('position')) if not aggr_dict['position__max']: position = 1 else: position = aggr_dict['position__max'] + 1 config = { 'showstats': form.cleaned_data['showstats'], 'showhisto': form.cleaned_data['showhisto'], 'url': form.cleaned_data['url'], } a = LetterActivity.objects.create(name=form.cleaned_data['name'], short_name=form.cleaned_data['short_name'], status=form.cleaned_data['status'], due_date=form.cleaned_data['due_date'], percent=form.cleaned_data['percent'], offering=course, position=position, group=GROUP_STATUS_MAP[form.cleaned_data['group']], config=config) if a.group == True and form.cleaned_data['extend_group'] is not None: a2 = [i for i in activities if i.slug == form.cleaned_data['extend_group']] if len(a2) > 0: add_activity_to_group(a, a2[0], course) #LOG EVENT# l = LogEntry(userid=request.user.username, description=("created a letter-graded activity %s") % (a), related_object=a) l.save() messages.success(request, 'New activity "%s" added' % a.name) _semester_date_warning(request, a) return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course_slug})) else: form = LetterActivityForm(previous_activities=activities_list) activities = course.activity_set.all() context = {'course': course, 'form': form, 'form_type': FORMTYPE['add']} return render(request, 'grades/letter_activity_form.html', context) @requires_course_staff_by_slug def all_grades(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) activities = all_activities_filter(offering=course) students = Member.objects.filter(offering=course, role="STUD").select_related('person', 'offering') # get grade data into a format we can work with grades = {} for a in activities: grades[a.slug] = {} if hasattr(a, 'numericgrade_set'): gs = a.numericgrade_set.all().select_related('member', 'member__person') else: gs = 
a.lettergrade_set.all().select_related('member', 'member__person') for g in gs: grades[a.slug][g.member.person.userid] = g context = {'course': course, 'students': students, 'activities': activities, 'grades': grades} return render(request, 'grades/all_grades.html', context) def _all_grades_output(response, course): activities = all_activities_filter(offering=course) students = Member.objects.filter(offering=course, role="STUD").select_related('person') # get grade data into a format we can work with labtut = course.labtut grades = {} for a in activities: grades[a.slug] = {} if hasattr(a, 'numericgrade_set'): gs = a.numericgrade_set.all().select_related('member', 'member__person') else: gs = a.lettergrade_set.all().select_related('member', 'member__person') for g in gs: grades[a.slug][g.member.person.userid] = g # output results writer = csv.writer(response) row = ['Last name', 'First name', Person.userid_header(), Person.emplid_header()] if labtut: row.append('Lab/Tutorial') for a in activities: row.append(a.short_name) writer.writerow(row) for s in students: row = [s.person.last_name, s.person.first_name, s.person.userid, s.person.emplid] if labtut: row.append(s.labtut_section or '') for a in activities: try: gr = grades[a.slug][s.person.userid] if gr.flag=='NOGR': g = '' else: if a.is_numeric(): g = gr.value else: g = gr.letter_grade except KeyError: g = '' row.append(g) writer.writerow(row) @requires_course_staff_by_slug def all_grades_csv(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'attachment; filename="%s.csv"' % (course_slug) _all_grades_output(response, course) return response @requires_course_staff_by_slug def grade_history(request, course_slug): """ Dump all GradeHistory for the offering to a CSV """ offering = get_object_or_404(CourseOffering, slug=course_slug) response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'inline; filename="%s-history.csv"' % (course_slug,) writer = csv.writer(response) writer.writerow(['Date/Time', 'Activity', 'Student', 'Entered By', 'Numeric Grade', 'Letter Grade', 'Status', 'Group']) grade_histories = GradeHistory.objects.filter(activity__offering=offering, status_change=False) \ .select_related('entered_by', 'activity', 'member__person', 'group') for gh in grade_histories: writer.writerow([ gh.timestamp, gh.activity.short_name, gh.member.person.userid_or_emplid(), gh.entered_by.userid_or_emplid(), gh.numeric_grade, gh.letter_grade, FLAGS.get(gh.grade_flag, None), gh.group.slug if gh.group else None, ]) return response @requires_course_staff_by_slug def class_list(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) members = Member.objects.filter(offering=course, role="STUD").select_related('person', 'offering') gms = GroupMember.objects.filter(confirmed=True, student__offering=course).select_related('group', 'group__courseoffering') groups = {} for gm in gms: gs = groups.get(gm.student_id, set()) groups[gm.student_id] = gs gs.add(gm.group) rows = [] for m in members: data = {'member': m, 'groups': groups.get(m.id, [])} rows.append(data) context = {'course': course, 'rows': rows} return render(request, 'grades/class_list.html', context) def has_photo_agreement(user): configs = UserConfig.objects.filter(user=user, key='photo-agreement') return bool(configs and configs[0].value['agree']) PHOTO_LIST_STYLES = set(['table', 'horiz', 'signin']) 
@requires_course_staff_by_slug def photo_list(request, course_slug, style='horiz'): if style not in PHOTO_LIST_STYLES: raise Http404 user = get_object_or_404(Person, userid=request.user.username) if not has_photo_agreement(user): url = reverse('config:photo_agreement') + '?return=' + urllib.parse.quote(request.path) return ForbiddenResponse(request, mark_safe('You must <a href="%s">confirm the photo usage agreement</a> before seeing student photos.' % (url))) course = get_object_or_404(CourseOffering, slug=course_slug) members = Member.objects.filter(offering=course, role="STUD").select_related('person', 'offering') # fire off a task to fetch the photos and warm the cache pre_fetch_photos(m.person.emplid for m in members) context = {'course': course, 'members': members} return render(request, 'grades/photo_list_%s.html' % (style), context) @login_required def student_photo(request, emplid): # confirm user's photo agreement user = get_object_or_404(Person, userid=request.user.username) can_access = False if Role.objects_fresh.filter(person=user, role__in=['ADVS', 'ADVM']): can_access = True else: if not has_photo_agreement(user): url = reverse('config:photo_agreement') + '?return=' + urllib.parse.quote(request.path) return ForbiddenResponse(request, mark_safe('You must <a href="%s">confirm the photo usage agreement</a> before seeing student photos.' % (url))) # confirm user is an instructor of this student (within the last two years) # TODO: cache past_semester to save the query? past_semester = Semester.get_semester(datetime.date.today() - datetime.timedelta(days=730)) student_members = Member.objects.filter(offering__semester__name__gte=past_semester.name, person__emplid=emplid, role='STUD').select_related('offering') student_offerings = [m.offering for m in student_members] instructor_of = Member.objects.filter(person=user, role='INST', offering__in=student_offerings) can_access = (instructor_of.count() > 0) if not can_access: return ForbiddenResponse(request, 'You must be an instructor of this student.') # get the photo data, status = photo_for_view(emplid) # return the photo response = HttpResponse(data, content_type='image/jpeg') response.status_code = status response['Content-Disposition'] = 'inline; filename="%s.jpg"' % (emplid) response['Cache-Control'] = 'private, max-age=300' response.slow_okay = True return response @requires_course_staff_by_slug def new_message(request, course_slug): offering = get_object_or_404(CourseOffering, slug=course_slug) staff = get_object_or_404(Person, userid=request.user.username) default_message = NewsItem(user=staff, author=staff, course=offering, source_app="dashboard") if request.method =='POST': form = MessageForm(data=request.POST, instance=default_message) if form.is_valid()==True: NewsItem.for_members(member_kwargs={'offering': offering}, newsitem_kwargs={ 'author': staff, 'course': offering, 'source_app': 'dashboard', 'title': form.cleaned_data['title'], 'content': form.cleaned_data['content'], 'url': form.cleaned_data['url'], 'markup': form.cleaned_data['_markup']}) #LOG EVENT# l = LogEntry(userid=request.user.username, description=("created a message for every student in %s") % (offering), related_object=offering) l.save() messages.add_message(request, messages.SUCCESS, 'News item created.') return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': offering.slug})) else: form = MessageForm() return render(request, "grades/new_message.html", {"form" : form,'course': offering}) @requires_course_staff_by_slug def 
student_search(request, course_slug): course = get_object_or_404(CourseOffering, slug=course_slug) if request.method == 'POST': # find the student if we can and redirect to info page form = StudentSearchForm(request.POST) if not form.is_valid(): messages.add_message(request, messages.ERROR, 'Invalid search') context = {'course': course, 'form': form} return render(request, 'grades/student_search.html', context) search = form.cleaned_data['search'] try: int(search) students = Member.objects.filter(offering=course, role="STUD").filter(Q(person__userid=search) | Q(person__emplid=search)) except ValueError: students = Member.objects.filter(offering=course, role="STUD").filter(person__userid=search) if len(students)!=1: if len(students)==0: messages.add_message(request, messages.ERROR, 'No student found') else: messages.add_message(request, messages.ERROR, 'Multiple students found') context = {'course': course, 'form': form} return render(request, 'grades/student_search.html', context) student = students[0] return HttpResponseRedirect(reverse('offering:student_info', kwargs={'course_slug': course_slug, 'userid': student.person.userid})) form = StudentSearchForm() context = {'course': course, 'form': form} return render(request, 'grades/student_search.html', context) @requires_course_staff_by_slug def student_info(request, course_slug, userid): course = get_object_or_404(CourseOffering, slug=course_slug) member = get_object_or_404(Member, ~Q(role='DROP'), find_member(userid), offering__slug=course_slug) requestor = get_object_or_404(Member, ~Q(role='DROP'), person__userid=request.user.username, offering__slug=course_slug) activities = all_activities_filter(offering=course) if member.role != "STUD": return NotFoundResponse(request) grade_info = [] for a in activities: info = {'act': a} # get grade if hasattr(a, 'numericgrade_set'): gs = a.numericgrade_set.filter(member=member) else: gs = a.lettergrade_set.filter(member=member) if gs: info['grade'] = gs[0] else: info['grade'] = None # find most recent submission sub_info = SubmissionInfo(student=member.person, activity=a) info['sub'] = sub_info.have_submitted() grade_info.append(info) # find marking info info['marked'] = False if StudentActivityMark.objects.filter(activity_id=a.id, numeric_grade__member=member): info['marked'] = True gms = GroupMember.objects.filter(activity_id=a.id, student=member, confirmed=True) if gms: # in a group gm = gms[0] if GroupActivityMark.objects.filter(activity_id=a.id, group=gm.group): info['marked'] = True dishonesty_cases = [] if requestor.role in ['INST', 'APPR']: from discipline.models import DisciplineCaseInstrStudent<|fim▁hole|> #grade_history = GradeHistory.objects.filter(member=member).select_related('entered_by', 'activity', 'group', 'mark') context = {'course': course, 'member': member, 'grade_info': grade_info, 'group_memberships': group_memberships, 'grade_history': grade_history, 'dishonesty_cases': dishonesty_cases, 'can_photo': has_photo_agreement(requestor.person)} return render(request, 'grades/student_info.html', context) @requires_course_staff_by_slug def export_all(request, course_slug): """ Export everything we can about this offering """ import io, tempfile, zipfile, os, json from django.http import StreamingHttpResponse from wsgiref.util import FileWrapper from marking.views import _mark_export_data, _DecimalEncoder from discuss.models import DiscussionTopic course = get_object_or_404(CourseOffering, slug=course_slug) handle, filename = tempfile.mkstemp('.zip') os.close(handle) z = 
zipfile.ZipFile(filename, 'w') # add all grades CSV allgrades = io.StringIO() _all_grades_output(allgrades, course) z.writestr("grades.csv", allgrades.getvalue()) allgrades.close() # add marking data acts = all_activities_filter(course) for a in acts: if ActivityComponent.objects.filter(numeric_activity_id=a.id): markingdata = _mark_export_data(a) markout = io.StringIO() json.dump({'marks': markingdata}, markout, cls=_DecimalEncoder, indent=1) z.writestr(a.slug + "-marking.json", markout.getvalue()) del markout, markingdata # add submissions acts = all_activities_filter(course) for a in acts: submission_info = SubmissionInfo.for_activity(a) submission_info.get_all_components() submission_info.generate_submission_contents(z, prefix=a.slug+'-submissions' + os.sep, always_summary=False) # add discussion if course.discussion(): topics = DiscussionTopic.objects.filter(offering=course).order_by('-pinned', '-last_activity_at') discussion_data = [t.exportable() for t in topics] discussout = io.StringIO() json.dump(discussion_data, discussout, indent=1) z.writestr("discussion.json", discussout.getvalue()) del discussion_data, discussout # return the zip file z.close() zipdata = open(filename, 'rb') response = StreamingHttpResponse(FileWrapper(zipdata), content_type='application/zip') response['Content-Length'] = os.path.getsize(filename) response['Content-Disposition'] = 'attachment; filename="' + course.slug + '.zip"' try: os.remove(filename) except OSError: pass return response<|fim▁end|>
dishonesty_cases = DisciplineCaseInstrStudent.objects.filter(offering=course, student=member.person)
group_memberships = GroupMember.objects.filter(student=member, activity__offering__slug=course_slug)
grade_history = GradeHistory.objects.filter(member=member, status_change=False).select_related('entered_by', 'activity', 'group', 'mark')
<|file_name|>polyfills.js<|end_file_name|><|fim▁begin|>// Polyfills if ( Number.EPSILON === undefined ) { Number.EPSILON = Math.pow( 2, - 52 ); } if ( Number.isInteger === undefined ) { // Missing in IE // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isInteger Number.isInteger = function ( value ) { return typeof value === 'number' && isFinite( value ) && Math.floor( value ) === value; }; } // if ( Math.sign === undefined ) { // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/sign Math.sign = function ( x ) { return ( x < 0 ) ? - 1 : ( x > 0 ) ? 1 : + x; }; } if ( 'name' in Function.prototype === false ) { // Missing in IE // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name Object.defineProperty( Function.prototype, 'name', { get: function () { return this.toString().match( /^\s*function\s*([^\(\s]*)/ )[ 1 ]; } } ); } if ( Object.assign === undefined ) { // Missing in IE // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign Object.assign = function ( target ) { 'use strict'; if ( target === undefined || target === null ) { <|fim▁hole|> } const output = Object( target ); for ( let index = 1; index < arguments.length; index ++ ) { const source = arguments[ index ]; if ( source !== undefined && source !== null ) { for ( const nextKey in source ) { if ( Object.prototype.hasOwnProperty.call( source, nextKey ) ) { output[ nextKey ] = source[ nextKey ]; } } } } return output; }; }<|fim▁end|>
throw new TypeError( 'Cannot convert undefined or null to object' );
<|file_name|>setup_bgmapi.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages

setup(
    name = 'project',<|fim▁hole|>
    packages = find_packages(),
    entry_points = {'scrapy': ['settings = bgmapi.settings']},
)<|fim▁end|>
version = '1.0',
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import functools from framework.auth import Auth<|fim▁hole|> from website.archiver import ( StatResult, AggregateStatResult, ARCHIVER_NETWORK_ERROR, ARCHIVER_SIZE_EXCEEDED, ARCHIVER_FILE_NOT_FOUND, ARCHIVER_FORCED_FAILURE, ) from website import ( mails, settings ) from osf.utils.sanitize import unescape_entities def send_archiver_size_exceeded_mails(src, user, stat_result, url): mails.send_mail( to_addr=settings.OSF_SUPPORT_EMAIL, mail=mails.ARCHIVE_SIZE_EXCEEDED_DESK, user=user, src=src, stat_result=stat_result, can_change_preferences=False, url=url, ) mails.send_mail( to_addr=user.username, mail=mails.ARCHIVE_SIZE_EXCEEDED_USER, user=user, src=src, can_change_preferences=False, mimetype='html', ) def send_archiver_copy_error_mails(src, user, results, url): mails.send_mail( to_addr=settings.OSF_SUPPORT_EMAIL, mail=mails.ARCHIVE_COPY_ERROR_DESK, user=user, src=src, results=results, url=url, can_change_preferences=False, ) mails.send_mail( to_addr=user.username, mail=mails.ARCHIVE_COPY_ERROR_USER, user=user, src=src, results=results, can_change_preferences=False, mimetype='html', ) def send_archiver_file_not_found_mails(src, user, results, url): mails.send_mail( to_addr=settings.OSF_SUPPORT_EMAIL, mail=mails.ARCHIVE_FILE_NOT_FOUND_DESK, can_change_preferences=False, user=user, src=src, results=results, url=url, ) mails.send_mail( to_addr=user.username, mail=mails.ARCHIVE_FILE_NOT_FOUND_USER, user=user, src=src, results=results, can_change_preferences=False, mimetype='html', ) def send_archiver_uncaught_error_mails(src, user, results, url): mails.send_mail( to_addr=settings.OSF_SUPPORT_EMAIL, mail=mails.ARCHIVE_UNCAUGHT_ERROR_DESK, user=user, src=src, results=results, can_change_preferences=False, url=url, ) mails.send_mail( to_addr=user.username, mail=mails.ARCHIVE_UNCAUGHT_ERROR_USER, user=user, src=src, results=results, can_change_preferences=False, mimetype='html', ) def handle_archive_fail(reason, src, dst, user, result): url = settings.INTERNAL_DOMAIN + src._id if reason == ARCHIVER_NETWORK_ERROR: send_archiver_copy_error_mails(src, user, result, url) elif reason == ARCHIVER_SIZE_EXCEEDED: send_archiver_size_exceeded_mails(src, user, result, url) elif reason == ARCHIVER_FILE_NOT_FOUND: send_archiver_file_not_found_mails(src, user, result, url) elif reason == ARCHIVER_FORCED_FAILURE: # Forced failure using scripts.force_fail_registration pass else: # reason == ARCHIVER_UNCAUGHT_ERROR send_archiver_uncaught_error_mails(src, user, result, url) dst.root.sanction.forcibly_reject() dst.root.sanction.save() dst.root.delete_registration_tree(save=True) def archive_provider_for(node, user): """A generic function to get the archive provider for some node, user pair. 
:param node: target node :param user: target user (currently unused, but left in for future-proofing the code for use with archive providers other than OSF Storage) """ return node.get_addon(settings.ARCHIVE_PROVIDER) def has_archive_provider(node, user): """A generic function for checking whether or not some node, user pair has an attached provider for archiving :param node: target node :param user: target user (currently unused, but left in for future-proofing the code for use with archive providers other than OSF Storage) """ return node.has_addon(settings.ARCHIVE_PROVIDER) def link_archive_provider(node, user): """A generic function for linking some node, user pair with the configured archive provider :param node: target node :param user: target user (currently unused, but left in for future-proofing the code for use with archive providers other than OSF Storage) """ addon = node.get_or_add_addon(settings.ARCHIVE_PROVIDER, auth=Auth(user), log=False) if hasattr(addon, 'on_add'): addon.on_add() node.save() def aggregate_file_tree_metadata(addon_short_name, fileobj_metadata, user): """Recursively traverse the addon's file tree and collect metadata in AggregateStatResult :param src_addon: AddonNodeSettings instance of addon being examined :param fileobj_metadata: file or folder metadata of current point of reference in file tree :param user: archive initatior :return: top-most recursive call returns AggregateStatResult containing addon file tree metadata """ disk_usage = fileobj_metadata.get('size') if fileobj_metadata['kind'] == 'file': result = StatResult( target_name=fileobj_metadata['name'], target_id=fileobj_metadata['path'].lstrip('/'), disk_usage=disk_usage or 0, ) return result else: return AggregateStatResult( target_id=fileobj_metadata['path'].lstrip('/'), target_name=fileobj_metadata['name'], targets=[aggregate_file_tree_metadata(addon_short_name, child, user) for child in fileobj_metadata.get('children', [])], ) def before_archive(node, user): from osf.models import ArchiveJob link_archive_provider(node, user) job = ArchiveJob.objects.create( src_node=node.registered_from, dst_node=node, initiator=user ) job.set_targets() def _do_get_file_map(file_tree): """Reduces a tree of folders and files into a list of (<sha256>, <file_metadata>) pairs """ file_map = [] stack = [file_tree] while len(stack): tree_node = stack.pop(0) if tree_node['kind'] == 'file': file_map.append((tree_node['extra']['hashes']['sha256'], tree_node)) else: stack = stack + tree_node['children'] return file_map def _memoize_get_file_map(func): cache = {} @functools.wraps(func) def wrapper(node): if node._id not in cache: osf_storage = node.get_addon('osfstorage') file_tree = osf_storage._get_file_tree(user=node.creator) cache[node._id] = _do_get_file_map(file_tree) return func(node, cache[node._id]) return wrapper @_memoize_get_file_map def get_file_map(node, file_map): """ note:: file_map is injected implictly by the decorator; this method is called like: get_file_map(node) """ for (key, value) in file_map: yield (key, value, node._id) for child in node.nodes_primary: for key, value, node_id in get_file_map(child): yield (key, value, node_id) def find_registration_file(value, node): """ some annotations: - `value` is the `extra` from a file upload in `registered_meta` (see `Uploader.addFile` in website/static/js/registrationEditorExtensions.js) - `node` is a Registration instance - returns a `(file_info, node_id)` or `(None, None)` tuple, where `file_info` is from waterbutler's api (see 
`addons.base.models.BaseStorageAddon._get_fileobj_child_metadata` and `waterbutler.core.metadata.BaseMetadata`) """ from osf.models import AbstractNode orig_sha256 = value['sha256'] orig_name = unescape_entities( value['selectedFileName'], safe={ '&lt;': '<', '&gt;': '>' } ) orig_node = value['nodeId'] file_map = get_file_map(node) for sha256, file_info, node_id in file_map: registered_from_id = AbstractNode.load(node_id).registered_from._id if sha256 == orig_sha256 and registered_from_id == orig_node and orig_name == file_info['name']: return file_info, node_id return None, None def find_registration_files(values, node): """ some annotations: - `values` is from `registered_meta`, e.g. `{ comments: [], value: '', extra: [] }` - `node` is a Registration model instance - returns a list of `(file_info, node_id, index)` or `(None, None, index)` tuples, where `file_info` is from `find_registration_file` above """ ret = [] for i in range(len(values.get('extra', []))): ret.append(find_registration_file(values['extra'][i], node) + (i,)) return ret def get_title_for_question(schema, path): path = path.split('.') root = path.pop(0) item = None for page in schema['pages']: questions = { q['qid']: q for q in page['questions'] } if root in questions: item = questions[root] title = item.get('title') while len(path): item = item.get(path.pop(0), {}) title = item.get('title', title) return title def find_selected_files(schema, metadata): """ some annotations: - `schema` is a RegistrationSchema instance - `metadata` is from `registered_meta` (for the given schema) - returns a dict that maps from each `osf-upload` question id (`.`-delimited path) to its chunk of metadata, e.g. `{ 'q1.uploader': { comments: [], extra: [...], value: 'foo.pdf' } }` """ targets = [] paths = [('', p) for p in schema.schema['pages']] while len(paths): prefix, path = paths.pop(0) if path.get('questions'): paths = paths + [('', q) for q in path['questions']] elif path.get('type'): qid = path.get('qid', path.get('id')) if path['type'] == 'object': paths = paths + [('{}.{}.value'.format(prefix, qid), p) for p in path['properties']] elif path['type'] == 'osf-upload': targets.append('{}.{}'.format(prefix, qid).lstrip('.')) selected = {} for t in targets: parts = t.split('.') value = metadata.get(parts.pop(0)) while value and len(parts): value = value.get(parts.pop(0)) if value: selected[t] = value return selected VIEW_FILE_URL_TEMPLATE = '/project/{node_id}/files/osfstorage/{file_id}/' def deep_get(obj, path): parts = path.split('.') item = obj key = None while len(parts): key = parts.pop(0) item[key] = item.get(key, {}) item = item[key] return item def migrate_file_metadata(dst, schema): metadata = dst.registered_meta[schema._id] missing_files = [] selected_files = find_selected_files(schema, metadata) for path, selected in selected_files.items(): target = deep_get(metadata, path) for archived_file_info, node_id, index in find_registration_files(selected, dst): if not archived_file_info: missing_files.append({ 'file_name': selected['extra'][index]['selectedFileName'], 'question_title': get_title_for_question(schema.schema, path) }) continue archived_file_id = archived_file_info['path'].lstrip('/') target['extra'][index]['viewUrl'] = VIEW_FILE_URL_TEMPLATE.format(node_id=node_id, file_id=archived_file_id) if missing_files: from website.archiver.tasks import ArchivedFileNotFound raise ArchivedFileNotFound( registration=dst, missing_files=missing_files ) dst.registered_meta[schema._id] = metadata dst.registration_responses = 
dst.flatten_registration_metadata()
    dst.save()<|fim▁end|>
<|file_name|>SimpleObject.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|># Copyright (c) 2017 Jason Lowe-Power
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Jason Lowe-Power

from m5.params import *
from m5.SimObject import SimObject

class SimpleObject(SimObject):
    type = 'SimpleObject'
    cxx_header = "learning_gem5/part2/simple_object.hh"<|fim▁end|>
<|file_name|>window.js<|end_file_name|><|fim▁begin|>/** * A debounce method that has a sliding window, there's a minimum and maximum wait time **/ module.exports = function (cb, min, max, settings) { var ctx, args, next, limit, timeout; <|fim▁hole|> settings = {}; } function fire() { limit = null; cb.apply(settings.context || ctx, args); } function run() { var now = Date.now(); if (now >= limit || now >= next) { fire(); } else { timeout = setTimeout(run, Math.min(limit, next) - now); } } let fn = function windowed() { var now = Date.now(); ctx = this; args = arguments; next = now + min; if (!limit) { limit = now + max; timeout = setTimeout(run, min); } }; fn.clear = function () { clearTimeout(timeout); timeout = null; limit = null; }; fn.flush = function () { fire(); fn.clear(); }; fn.shift = function (diff) { limit += diff; }; fn.active = function () { return !!limit; }; return fn; };<|fim▁end|>
if (!settings) {
<|file_name|>app.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core'; import { BrowserModule } from '@angular/platform-browser'; import { FormsModule } from '@angular/forms'; import { AppComponent } from './app.component'; import { jqxCheckBoxComponent } from 'components/angular_jqxcheckbox'; @NgModule({ imports: [BrowserModule, FormsModule], declarations: [AppComponent, jqxCheckBoxComponent],<|fim▁hole|>export class AppModule { }<|fim▁end|>
bootstrap: [AppComponent] })
<|file_name|>cdpserver.py<|end_file_name|><|fim▁begin|>''' ThunderGate - an open source toolkit for PCI bus exploration Copyright (C) 2015-2016 Saul St. John This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import os import json import sys import platform import traceback import functools import struct from collections import namedtuple p = platform.system() if "Windows" == p: LINE_SEP = "\n" else: LINE_SEP = "\r\n" del p from image import Image from monitor import ExecutionMonitor from datamodel import model_registers, model_memory, get_data_value, GenericModel from blocks.cpu import mips_regs try: from capstone import * from capstone.mips import * if cs_version()[0] < 3: print "[-] capstone outdated - disassembly unavailable" _no_capstone = True else: _no_capstone = False md_mode = CS_MODE_MIPS32 + CS_MODE_BIG_ENDIAN md = Cs(CS_ARCH_MIPS, md_mode) md.detail = True md.skipdata = True def _disassemble_word(word): i = struct.pack(">I", word) r = md.disasm(i, 4).next() return "%s %s" % (r.mnemonic, r.op_str) except: print "[-] capstone not present - disassembly unavailable" _no_capstone = True class ScopeModel(GenericModel): pass class Var_Tracker(object): def __init__(self): self._references = [] self._scopes = [] self._fixed_reference_end = None self._fixed_scope_end = None self._known_frame_levels = [] def _assign_variablesReference(self, v): self._references.append(v) v.variablesReference = len(self._references) def _add_variables_references(self, v): if hasattr(v, "children") and isinstance(v.children, list) and len(v.children) > 0: self._assign_variablesReference(v)<|fim▁hole|> c.scope = v.scope self._add_variables_references(c) def add_fixed_scope(self, s, fl=-1): if self._fixed_scope_end: raise Exception("fixed scopes cannot be added while dynamic scopes are present") self._add_scope(s, fl) def _add_scope(self, s, fl=0): print "adding scope %s" % s.name s.fl = fl self._assign_variablesReference(s) self._scopes += [s] for c in s.children: c.scope = s self._add_variables_references(c) if not fl in self._known_frame_levels: self._known_frame_levels += [fl] def add_dynamic_scope(self, s, fl = 0): if not self._fixed_scope_end: self._fixed_scope_end = len(self._scopes) self._fixed_reference_end = len(self._references) self._add_scope(s, fl) def clear_dynamic_scopes(self): self._scopes = self._scopes[:self._fixed_scope_end] self._references = self._references[:self._fixed_reference_end] self._fixed_scope_end = None self._fixed_reference_end = None self._known_frame_levels = [] def get_scopes(self, fl=0): return [s for s in self._scopes if s.fl == fl or s.fl == -1] def dereference(self, ref_no): return self._references[ref_no - 1] class CDPServer(object): def __init__(self, dev, di, do): self.data_in = di self.data_out = do self.dev = dev self._monitor = ExecutionMonitor(dev, functools.partial(CDPServer._evt_stopped, self)) self.__dispatch_setup() self._register_model = model_registers(dev) self._register_model.accessor = 
functools.partial(get_data_value, mroot=self._register_model) self._memory_model = model_memory(dev) self._memory_model.accessor = functools.partial(get_data_value, mroot=self._register_model) self._vt = Var_Tracker() self._vt.add_fixed_scope(self._register_model) self._vt.add_fixed_scope(self._memory_model) for c in self._register_model.children: if c.name == "rxcpu": s = ScopeModel("rxcpu registers") s2 = ScopeModel("rxcpu state") for r in c.children: if r.name[0] == 'r' and r.name[1:].isdigit(): reg_no = int(r.name[1:]) reg_name = mips_regs.inv[reg_no] reg_for_display = GenericModel(r.name, r.parent) reg_for_display.display_name = reg_name s.children += [reg_for_display] if r.name == "pc": ss = GenericModel(r.name, r.parent) ss.display_name = "program counter" ss.accessor = lambda r=r:self._register_model.accessor(r) s2.children += [ss] if r.name == "ir": ss = GenericModel(r.name, r.parent) ss.display_name = "instruction register" ss.accessor = lambda r=r:self._register_model.accessor(r) s2.children += [ss] if not _no_capstone: ss = GenericModel(r.name, r.parent) ss.display_name = "instruction register (decoded)" ss.accessor = lambda r=r:_disassemble_word(self._register_model.accessor(r)) s2.children += [ss] if r.name in ["mode", "status"]: ss = GenericModel(r.name, r.parent) ss.accessor = lambda r=r:self._register_model.accessor(r) for b in r.children: cc = GenericModel(b.name, b.parent) cc.accessor = lambda b=b:self._register_model.accessor(b) ss.children += [cc] s2.children += [ss] s.accessor = self._register_model.accessor self._vt.add_fixed_scope(s, fl=1) s2.accessor = lambda x: x.accessor() self._vt.add_fixed_scope(s2, fl=1) self._breakpoints = {} self._bp_replaced_insn = {} def __enter__(self): return self def __exit__(self, t, v, traceback): pass def __dispatch_setup(self): self.__dispatch_tbl = {} for i in self.__class__.__dict__: if len(i) > 5 and i[0:5] == "_cmd_": self.__dispatch_tbl[unicode(i[5:])] = getattr(self, i) def _dispatch_cmd(self, cmd): try: fncall = self.__dispatch_tbl[cmd["command"]] except: fncall = self._default_cmd fncall(cmd) def _cmd_initialize(self, cmd): self._seq = 1 ex = {} ex["supportsConfigurationDoneRequest"] = True ex["supportEvaluateForHovers"] = False self._respond(cmd, True, ex=ex) def _cmd_launch(self, cmd): try: stop_now = cmd["arguments"]["stopOnEntry"] except: stop_now = False program = cmd["arguments"]["program"] self._image = Image(program) self.dev.rxcpu.reset() #self.dev.ma.mode.fast_ath_read_disable = 1 #self.dev.ma.mode.cpu_pipeline_request_disable = 1 #self.dev.ma.mode.low_latency_enable = 1 self.dev.rxcpu.image_load(*self._image.executable) self._respond(cmd, True) if stop_now: b = {} b["reason"] = "launch" b["threadId"] = 1 self._event("stopped", body = b) else: self._monitor.watch() self._event("initialized") def _cmd_setExceptionBreakpoints(self, cmd): self._respond(cmd, False) def _cmd_setBreakpoints(self, cmd): source = os.path.basename(cmd["arguments"]["source"]["path"]) self._clear_breakpoint(source) breakpoints_set = [] if "lines" in cmd["arguments"]: for line in cmd["arguments"]["lines"]: success = self._setup_breakpoint(source, line) b = {"verified": success, "line": line} breakpoints_set += [b] if "breakpoints" in cmd["arguments"]: for bp in cmd["arguments"]["breakpoints"]: line = bp["line"] if "condition" in bp: success = False else: success = self._setup_breakpoint(source, line) b = {"verified": success, "line": line} breakpoints_set += [b] self._respond(cmd, True, body = {"breakpoints": breakpoints_set}) def 
__advance_to_next_line(self): while True: self.dev.rxcpu.mode.single_step = 1 count = 0 while self.dev.rxcpu.mode.single_step: count += 1 if count > 500: raise Exception("single step bit failed to clear") current_pc = self.dev.rxcpu.pc if not current_pc in self._bp_replaced_insn: ir_reg_val = self.dev.rxcpu.ir insn_from_mem = struct.unpack(">I", self.dev.rxcpu.tr_read(current_pc, 1))[0] if ir_reg_val != insn_from_mem: print "ir reg is %x, should be %x, fixing." % (ir_reg_val, insn_from_mem) self.dev.rxcpu.ir = insn_from_mem ir_reg_val = self.dev.rxcpu.ir assert ir_reg_val == insn_from_mem else: self.__prepare_resume_from_breakpoint() if current_pc in self._image._addresses: break def _cmd_next(self, cmd): self._respond(cmd, True) pc = self.dev.rxcpu.pc cl = self._image.addr2line(pc) self._log_write("single step began at pc: %x, cl: %s" % (pc, cl)) self.__advance_to_next_line() pc = self.dev.rxcpu.pc cl = self._image.addr2line(pc) self._log_write("single step completed at pc: %x, cl: \"%s\"" % (pc, cl)) self.__prepare_resume_from_breakpoint() self._event("stopped", {"reason": "step", "threadId": 1}) def _cmd_threads(self, cmd): t = {} t["id"] = 1 t["name"] = "Main Thread" b = {} b["threads"] = [t] self._respond(cmd, True, body = b) def _cmd_disconnect(self, cmd): self._running = False self._respond(cmd, True) def _cmd_continue(self, cmd): self._respond(cmd, True) self._monitor.watch() def _cmd_pause(self, cmd): self._respond(cmd, True) self.dev.rxcpu.halt() def _cmd_stackTrace(self, cmd): self._vt.clear_dynamic_scopes() self._stack = self.__stack_unwind() frame_id = 1 b = {"stackFrames": []} for f in self._stack: loc = self._image.loc_at(f["pc"]) print "0x%x" % f["pc"] source_path = loc[3] + os.sep + loc[1] source_name = "fw" + os.sep + loc[1] s = {"name": source_name, "path": source_path} f = {"id": frame_id, "name": loc[0], "line": int(loc[2]), "column": 1, "source": s} b["stackFrames"] += [f] frame_id += 1 self._respond(cmd, True, body = b) def __stack_unwind(self): frame_state_attrs = ["r%d" % x for x in range(32)] + ["pc"] frame_state = {} for a in frame_state_attrs: frame_state[a] = getattr(self.dev.rxcpu, a) if 0x8008000 <= frame_state["pc"] and 0x8008010 > frame_state["pc"]: return [frame_state] else: return self.__unwind_stack_from([frame_state]) def __unwind_stack_from(self, frame_states): frame_state = self.__restore_cf(frame_states[-1]) if frame_state is None: return frame_states return_address = frame_state["r31"] call_site = return_address - 8 if 0x8000000 <= call_site and 0x8010000 > call_site: frame_state["pc"] = call_site if not frame_state["r31"] is None: return self.__unwind_stack_from(frame_states + [frame_state]) else: return frame_states + [frame_state] else: return frame_states + [frame_state] def __find_cfa_tbl_line_for(self, pc): result = None for entry in sorted(self._image._cfa_rule.keys(), reverse=True): if pc >= entry and entry != 0: result = self._image._cfa_rule[entry] break return result def __restore_cf(self, frame_state): new_frame_state = frame_state.copy() pc = frame_state["pc"] tbl_line = self.__find_cfa_tbl_line_for(pc) if tbl_line is None: return None cfa_rule = tbl_line["cfa"] if not cfa_rule.expr is None: raise Exception("DWARF expression unhandled in CFA") cfa = frame_state["r%d" % cfa_rule.reg] cfa += cfa_rule.offset new_frame_state["r29"] = cfa for reg in tbl_line: if reg in ["cfa", "pc"]: continue reg_rule = tbl_line[reg] if reg_rule.type != 'OFFSET': raise Exception("DWARF register rule type %s unhandled" % reg_rule.type) reg_val_addr = cfa + 
reg_rule.arg reg_val = struct.unpack(">I", self.dev.rxcpu.tr_read(reg_val_addr, 1))[0] new_frame_state["r%d" % reg] = reg_val return new_frame_state def _collect_scopes(self, frame): loc = self._image.loc_at(frame["pc"]) func_name, cu_name, cu_line, source_dir = loc scopes = [] if len(self._image._compile_units[cu_name]["variables"]) > 0: global_scope = ScopeModel("global variables") global_scope.children = self._collect_vars(self._image._compile_units[cu_name]["variables"], global_scope, frame) global_scope.accessor = lambda x: x.evaluator() scopes += [global_scope] if func_name: if len(self._image._compile_units[cu_name]["functions"][func_name]["args"]) > 0: argument_scope = ScopeModel("function arguments") argument_scope.children = self._collect_vars(self._image._compile_units[cu_name]["functions"][func_name]["args"], argument_scope, frame) argument_scope.accessor = lambda x: x.evaluator() scopes += [argument_scope] if len(self._image._compile_units[cu_name]["functions"][func_name]["vars"]) > 0: local_scope = ScopeModel("local variables") local_scope.children = self._collect_vars(self._image._compile_units[cu_name]["functions"][func_name]["vars"], local_scope, frame) local_scope.accessor = lambda x: x.evaluator() scopes += [local_scope] return scopes def _collect_vars(self, variables, scope, frame): def _var_pp(v): try: return "%x" % v except: return str(v) collected = [] for v in variables: o = GenericModel(v, scope, scope) o.evaluator = lambda v=v: _var_pp(self._image.get_expr_evaluator().process_expr(self.dev, variables[v]["location"], frame)) collected += [o] return collected def _cmd_scopes(self, cmd): frame_id = cmd["arguments"]["frameId"] if not frame_id in self._vt._known_frame_levels: frame = self._stack[frame_id - 1] dynamic_scopes = self._collect_scopes(frame) for scope in dynamic_scopes: self._vt.add_dynamic_scope(scope, frame_id) scopes = [] for s in self._vt.get_scopes(frame_id): scopes += [{"name": s.name, "variablesReference": s.variablesReference, "expensive": True}] b = {"scopes": scopes} self._respond(cmd, True, body = b) def _cmd_variables(self, cmd): members = self._vt.dereference(cmd["arguments"]["variablesReference"]) b = {} b["variables"] = [] for child in members.children: o = {} try: o["name"] = child.display_name except: o["name"] = child.name if hasattr(child, "variablesReference"): o["variablesReference"] = child.variablesReference o["value"] = "" else: o["variablesReference"] = 0 data_value = child.scope.accessor(child) try: o["value"] = "%x" % data_value except: o["value"] = str(data_value) b["variables"] += [o] self._respond(cmd, True, body = b) def _default_cmd(self, cmd): self._log_write("unknown command: %s" % cmd["command"]) self._respond(cmd, False) def _log_write(self, data): print data.strip() sys.stdout.flush() def _evt_stopped(self): b = {"threadId": 1} pc = self.dev.rxcpu.pc if self.dev.rxcpu.status.halted: b["reason"] = "pause" if not self._image.addr2line(pc): print "halted at unknown pc %x, advancing..." 
% pc self.__advance_to_next_line() pc = self.dev.rxcpu.pc cl = self._image.addr2line(pc) print "finished halting at pc %x, \"%s\"" % (pc, cl) self.__prepare_resume_from_breakpoint() else: if pc in self._bp_replaced_insn: b["reason"] = "breakpoint" print "breakpoint reached at %x (\"%s\")" % (pc, self._image.addr2line(pc)) self.__prepare_resume_from_breakpoint() else: b["reason"] = "exception" b["text"] = "status: %x" % self.dev.rxcpu.status.word print "stopped on unknown rxcpu exception at %x (\"%s\"), status: %x" % (pc, self._image.addr2line(pc), self.dev.rxcpu.status.word) self._event("stopped", body = b) def _event(self, event, body = None): r = {} r["type"] = "event" r["seq"] = self._seq self._seq += 1 r["event"] = event if body is not None: r["body"] = body self.send(r) def _respond(self, req, success, message = None, body = None, ex=None): r = {} r["type"] = "response" r["seq"] = self._seq self._seq += 1 r["request_seq"] = req["seq"] r["success"] = True if success else False r["command"] = req["command"] if message is not None: r["message"] = message if body is not None: r["body"] = body if ex is not None: r.update(ex) self.send(r) def __insn_repl(self, addr, replacement): original_insn = struct.unpack("!I", self.dev.rxcpu.tr_read(addr, 1))[0] self.dev.rxcpu.tr_write_dword(addr, replacement) return original_insn try: self._breakpoints[addr] = original_insn except: self._breakpoints = {addr: original_insn} def _setup_breakpoint(self, filename, line): try: line_addrs = self._image.line2addr(filename, line) except: return False if not filename in self._breakpoints: self._breakpoints[filename] = {} current_breakpoints = self._breakpoints[filename] for addr in line_addrs: if line in current_breakpoints and addr in current_breakpoints[line]: print "breakpoint at %s+%d already set" % (filename, line) else: self._bp_replaced_insn[addr] = self.__insn_repl(addr, 0xd) try: current_breakpoints[line] += [addr] except: current_breakpoints[line] = [addr] print "breakpoint set at \"%s+%d\" (%x)" % (filename, line, addr) return True def _clear_breakpoint(self, filename, line_no = None): try: current_breakpoints = self._breakpoints[filename] except: return if line_no is None: lines = current_breakpoints.keys() else: lines = [line_no] for line in lines: line_addrs = current_breakpoints[line] for addr in line_addrs: self.__insn_repl(addr, self._bp_replaced_insn[addr]) del self._bp_replaced_insn[addr] print "breakpoint cleared at \"%s+%d\" (%x)" % (filename, line, addr) del self._breakpoints[filename][line] def __prepare_resume_from_breakpoint(self): pc = self.dev.rxcpu.pc if pc in self._bp_replaced_insn: replacement = self._bp_replaced_insn[pc] print "pc %x is a soft breakpoint, restoring ir with %x" % (pc, replacement) self.dev.rxcpu.ir = replacement def send(self, resp): r = json.dumps(resp, separators=(",",":")) cl = len(r) txt = "Content-Length: %d%s%s" % (cl, LINE_SEP + LINE_SEP, r) self._log_write("sent: %s\n" % r) self.data_out.write(txt) self.data_out.flush() def recv(self): h = self.data_in.readline() content_length = int(h.split(" ")[1]) d = self.data_in.readline() d = self.data_in.read(content_length) self._log_write("rcvd: %s\n" % repr(d)) try: j = json.loads(d) except: self._log_write("EXCEPTION!") return j def run(self): self._running = True while self._running: j = self.recv() try: self._dispatch_cmd(j) except Exception as e: traceback.print_exc(file=sys.stdout) raise e return 0<|fim▁end|>
for c in v.children:
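The cdpserver.py record above frames every debug-adapter message as a Content-Length header, a blank line, then the compact JSON body (see its send and recv methods). A stand-alone sketch of that framing, assuming a file-like stream and CRLF separators; the record itself picks the separator per platform, and these function names are mine, not the project's:

import json

def frame(message):
    body = json.dumps(message, separators=(",", ":"))
    return "Content-Length: %d\r\n\r\n%s" % (len(body), body)

def unframe(stream):
    header = stream.readline()             # e.g. "Content-Length: 123"
    length = int(header.split(" ")[1])
    stream.readline()                      # blank separator line
    return json.loads(stream.read(length))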
<|file_name|>ExactInlineInfo.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2013, 2021, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package org.graalvm.compiler.phases.common.inlining.info; import org.graalvm.collections.EconomicSet; import org.graalvm.compiler.graph.Node; import org.graalvm.compiler.nodes.Invoke;<|fim▁hole|>import org.graalvm.compiler.phases.common.inlining.info.elem.Inlineable; import org.graalvm.compiler.phases.util.Providers; import jdk.vm.ci.meta.ResolvedJavaMethod; /** * Represents an inlining opportunity where the compiler can statically determine a monomorphic * target method and therefore is able to determine the called method exactly. */ public class ExactInlineInfo extends AbstractInlineInfo { protected final ResolvedJavaMethod concrete; private Inlineable inlineableElement; private boolean suppressNullCheck; public ExactInlineInfo(Invoke invoke, ResolvedJavaMethod concrete) { super(invoke); this.concrete = concrete; assert concrete != null; } public void suppressNullCheck() { suppressNullCheck = true; } @Override public EconomicSet<Node> inline(CoreProviders providers, String reason) { return inline(invoke, concrete, inlineableElement, !suppressNullCheck, reason); } @Override public void tryToDevirtualizeInvoke(Providers providers) { // nothing todo, can already be bound statically } @Override public int numberOfMethods() { return 1; } @Override public ResolvedJavaMethod methodAt(int index) { assert index == 0; return concrete; } @Override public double probabilityAt(int index) { assert index == 0; return 1.0; } @Override public double relevanceAt(int index) { assert index == 0; return 1.0; } @Override public String toString() { return "exact " + concrete.format("%H.%n(%p):%r"); } @Override public Inlineable inlineableElementAt(int index) { assert index == 0; return inlineableElement; } @Override public void setInlinableElement(int index, Inlineable inlineableElement) { assert index == 0; this.inlineableElement = inlineableElement; } @Override public boolean shouldInline() { return concrete.shouldBeInlined(); } }<|fim▁end|>
import org.graalvm.compiler.nodes.spi.CoreProviders;
<|file_name|>fake_video_capture_device.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "media/video/capture/fake_video_capture_device.h" #include <string> #include "base/bind.h" #include "base/memory/scoped_ptr.h" #include "base/strings/stringprintf.h" #include "media/audio/fake_audio_input_stream.h" #include "media/base/video_frame.h" #include "third_party/skia/include/core/SkBitmap.h" #include "third_party/skia/include/core/SkCanvas.h" #include "third_party/skia/include/core/SkPaint.h" namespace media { static const int kFakeCaptureBeepCycle = 10; // Visual beep every 0.5s. static const int kFakeCaptureCapabilityChangePeriod = 30; FakeVideoCaptureDevice::FakeVideoCaptureDevice() : capture_thread_("CaptureThread"), frame_count_(0), format_roster_index_(0) {} FakeVideoCaptureDevice::~FakeVideoCaptureDevice() { DCHECK(thread_checker_.CalledOnValidThread()); DCHECK(!capture_thread_.IsRunning()); } void FakeVideoCaptureDevice::AllocateAndStart( const VideoCaptureParams& params, scoped_ptr<VideoCaptureDevice::Client> client) { DCHECK(thread_checker_.CalledOnValidThread());<|fim▁hole|> NOTREACHED(); return; } capture_thread_.Start(); capture_thread_.message_loop()->PostTask( FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnAllocateAndStart, base::Unretained(this), params, base::Passed(&client))); } void FakeVideoCaptureDevice::StopAndDeAllocate() { DCHECK(thread_checker_.CalledOnValidThread()); if (!capture_thread_.IsRunning()) { NOTREACHED(); return; } capture_thread_.message_loop()->PostTask( FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnStopAndDeAllocate, base::Unretained(this))); capture_thread_.Stop(); } void FakeVideoCaptureDevice::PopulateVariableFormatsRoster( const VideoCaptureFormats& formats) { DCHECK(thread_checker_.CalledOnValidThread()); DCHECK(!capture_thread_.IsRunning()); format_roster_ = formats; format_roster_index_ = 0; } void FakeVideoCaptureDevice::OnAllocateAndStart( const VideoCaptureParams& params, scoped_ptr<VideoCaptureDevice::Client> client) { DCHECK_EQ(capture_thread_.message_loop(), base::MessageLoop::current()); client_ = client.Pass(); // Incoming |params| can be none of the supported formats, so we get the // closest thing rounded up. TODO(mcasas): Use the |params|, if they belong to // the supported ones, when http://crbug.com/309554 is verified. 
DCHECK_EQ(params.requested_format.pixel_format, PIXEL_FORMAT_I420); capture_format_.pixel_format = params.requested_format.pixel_format; capture_format_.frame_rate = 30; if (params.requested_format.frame_size.width() > 1280) capture_format_.frame_size.SetSize(1920, 1080); else if (params.requested_format.frame_size.width() > 640) capture_format_.frame_size.SetSize(1280, 720); else if (params.requested_format.frame_size.width() > 320) capture_format_.frame_size.SetSize(640, 480); else capture_format_.frame_size.SetSize(320, 240); const size_t fake_frame_size = VideoFrame::AllocationSize(VideoFrame::I420, capture_format_.frame_size); fake_frame_.reset(new uint8[fake_frame_size]); capture_thread_.message_loop()->PostTask( FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnCaptureTask, base::Unretained(this))); } void FakeVideoCaptureDevice::OnStopAndDeAllocate() { DCHECK_EQ(capture_thread_.message_loop(), base::MessageLoop::current()); client_.reset(); } void FakeVideoCaptureDevice::OnCaptureTask() { if (!client_) return; const size_t frame_size = VideoFrame::AllocationSize(VideoFrame::I420, capture_format_.frame_size); memset(fake_frame_.get(), 0, frame_size); SkImageInfo info = SkImageInfo::MakeA8(capture_format_.frame_size.width(), capture_format_.frame_size.height()); SkBitmap bitmap; bitmap.installPixels(info, fake_frame_.get(), info.width()); SkCanvas canvas(bitmap); // Draw a sweeping circle to show an animation. int radius = std::min(capture_format_.frame_size.width(), capture_format_.frame_size.height()) / 4; SkRect rect = SkRect::MakeXYWH(capture_format_.frame_size.width() / 2 - radius, capture_format_.frame_size.height() / 2 - radius, 2 * radius, 2 * radius); SkPaint paint; paint.setStyle(SkPaint::kFill_Style); // Only Y plane is being drawn and this gives 50% grey on the Y // plane. The result is a light green color in RGB space. paint.setAlpha(128); int end_angle = (frame_count_ % kFakeCaptureBeepCycle * 360) / kFakeCaptureBeepCycle; if (!end_angle) end_angle = 360; canvas.drawArc(rect, 0, end_angle, true, paint); // Draw current time. int elapsed_ms = kFakeCaptureTimeoutMs * frame_count_; int milliseconds = elapsed_ms % 1000; int seconds = (elapsed_ms / 1000) % 60; int minutes = (elapsed_ms / 1000 / 60) % 60; int hours = (elapsed_ms / 1000 / 60 / 60) % 60; std::string time_string = base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, milliseconds, frame_count_); canvas.scale(3, 3); canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); if (frame_count_ % kFakeCaptureBeepCycle == 0) { // Generate a synchronized beep sound if there is one audio input // stream created. FakeAudioInputStream::BeepOnce(); } frame_count_++; // Give the captured frame to the client. client_->OnIncomingCapturedData(fake_frame_.get(), frame_size, capture_format_, 0, base::TimeTicks::Now()); if (!(frame_count_ % kFakeCaptureCapabilityChangePeriod) && format_roster_.size() > 0U) { Reallocate(); } // Reschedule next CaptureTask. 
capture_thread_.message_loop()->PostDelayedTask( FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnCaptureTask, base::Unretained(this)), base::TimeDelta::FromMilliseconds(kFakeCaptureTimeoutMs)); } void FakeVideoCaptureDevice::Reallocate() { DCHECK_EQ(capture_thread_.message_loop(), base::MessageLoop::current()); capture_format_ = format_roster_.at(++format_roster_index_ % format_roster_.size()); DCHECK_EQ(capture_format_.pixel_format, PIXEL_FORMAT_I420); DVLOG(3) << "Reallocating FakeVideoCaptureDevice, new capture resolution " << capture_format_.frame_size.ToString(); const size_t fake_frame_size = VideoFrame::AllocationSize(VideoFrame::I420, capture_format_.frame_size); fake_frame_.reset(new uint8[fake_frame_size]); } } // namespace media<|fim▁end|>
if (capture_thread_.IsRunning()) {
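The fake_video_capture_device.cc record above stamps each generated frame with the elapsed time split into hours, minutes, seconds and milliseconds (and, as written, wraps hours at 60 as well). The same breakdown written out in Python for reference; the function name is mine:

def split_elapsed(elapsed_ms):
    milliseconds = elapsed_ms % 1000
    seconds = (elapsed_ms // 1000) % 60
    minutes = (elapsed_ms // 1000 // 60) % 60
    hours = (elapsed_ms // 1000 // 60 // 60) % 60   # mirrors the % 60 applied in the C++ source
    return hours, minutes, seconds, milliseconds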
<|file_name|>dllmain.cpp<|end_file_name|><|fim▁begin|>/* This file is part of: NoahFrame https://github.com/ketoo/NoahGameFrame Copyright 2009 - 2018 NoahFrame(NoahGameFrame) File creator: lvsheng.huang NoahFrame is open-source software and you can redistribute it and/or modify it under the terms of the License; besides, anyone who use this file/software must include this copyright announcement. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #include "NFComm/NFPluginModule/NFPlatform.h"<|fim▁hole|>#ifdef NF_DEBUG_MODE #if NF_PLATFORM == NF_PLATFORM_WIN #pragma comment( lib, "ws2_32" ) #pragma comment( lib, "NFMessageDefine_d.lib" ) #pragma comment( lib, "libprotobuf_d.lib" ) #pragma comment( lib, "NFCore_d.lib" ) #elif NF_PLATFORM == NF_PLATFORM_LINUX || NF_PLATFORM == NF_PLATFORM_ANDROID #pragma comment( lib, "NFMessageDefine_d.a" ) #pragma comment( lib, "libprotobuf_d.a" ) #pragma comment( lib, "NFCore_d.a" ) #elif NF_PLATFORM == NF_PLATFORM_APPLE || NF_PLATFORM == NF_PLATFORM_APPLE_IOS #endif #else #if NF_PLATFORM == NF_PLATFORM_WIN #pragma comment( lib, "ws2_32" ) #pragma comment( lib, "NFMessageDefine.lib" ) #pragma comment( lib, "libprotobuf.lib" ) #pragma comment( lib, "NFCore.lib" ) #elif NF_PLATFORM == NF_PLATFORM_LINUX || NF_PLATFORM == NF_PLATFORM_ANDROID #pragma comment( lib, "NFMessageDefine.a" ) #pragma comment( lib, "libprotobuf.a" ) #pragma comment( lib, "NFCore.a" ) #elif NF_PLATFORM == NF_PLATFORM_APPLE || NF_PLATFORM == NF_PLATFORM_APPLE_IOS #endif #endif<|fim▁end|>
<|file_name|>runner.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2019, Cloudflare, Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. use quiche::h3::NameValue; use ring::rand::*; use crate::Http3TestError; pub fn run( test: &mut crate::Http3Test, peer_addr: std::net::SocketAddr, verify_peer: bool, idle_timeout: u64, max_data: u64, early_data: bool, session_file: Option<String>, ) -> Result<(), Http3TestError> { const MAX_DATAGRAM_SIZE: usize = 1350; let mut buf = [0; 65535]; let mut out = [0; MAX_DATAGRAM_SIZE]; let max_stream_data = max_data; let version = if let Some(v) = std::env::var_os("QUIC_VERSION") { match v.to_str() { Some("current") => quiche::PROTOCOL_VERSION, Some(v) => u32::from_str_radix(v, 16).unwrap(), _ => 0xbaba_baba, } } else { 0xbaba_baba }; let mut reqs_count = 0; let mut reqs_complete = 0; // Setup the event loop. let poll = mio::Poll::new().unwrap(); let mut events = mio::Events::with_capacity(1024); info!("connecting to {:}", peer_addr); // Bind to INADDR_ANY or IN6ADDR_ANY depending on the IP family of the // server address. This is needed on macOS and BSD variants that don't // support binding to IN6ADDR_ANY for both v4 and v6. let bind_addr = match peer_addr { std::net::SocketAddr::V4(_) => "0.0.0.0:0", std::net::SocketAddr::V6(_) => "[::]:0", }; // Create the UDP socket backing the QUIC connection, and register it with // the event loop. let socket = std::net::UdpSocket::bind(bind_addr).unwrap(); let socket = mio::net::UdpSocket::from_socket(socket).unwrap(); poll.register( &socket, mio::Token(0), mio::Ready::readable(), mio::PollOpt::edge(), ) .unwrap(); // Create the configuration for the QUIC connection. 
let mut config = quiche::Config::new(version).unwrap(); config.verify_peer(verify_peer); config .set_application_protos(quiche::h3::APPLICATION_PROTOCOL) .unwrap(); config.set_max_idle_timeout(idle_timeout); config.set_max_recv_udp_payload_size(MAX_DATAGRAM_SIZE); config.set_initial_max_data(max_data); config.set_initial_max_stream_data_bidi_local(max_stream_data); config.set_initial_max_stream_data_bidi_remote(max_stream_data); config.set_initial_max_stream_data_uni(max_stream_data); config.set_initial_max_streams_bidi(100); config.set_initial_max_streams_uni(100); config.set_disable_active_migration(true); if early_data { config.enable_early_data(); debug!("early data enabled"); } let mut http3_conn = None; if std::env::var_os("SSLKEYLOGFILE").is_some() { config.log_keys(); } // Generate a random source connection ID for the connection. let mut scid = [0; quiche::MAX_CONN_ID_LEN]; SystemRandom::new().fill(&mut scid[..]).unwrap(); let scid = quiche::ConnectionId::from_ref(&scid); // Create a QUIC connection and initiate handshake. let url = &test.endpoint(); let mut conn = quiche::connect(url.domain(), &scid, peer_addr, &mut config).unwrap(); if let Some(session_file) = &session_file { if let Ok(session) = std::fs::read(session_file) { conn.set_session(&session).ok(); } } let (write, send_info) = conn.send(&mut out).expect("initial send failed"); while let Err(e) = socket.send_to(&out[..write], &send_info.to) { if e.kind() == std::io::ErrorKind::WouldBlock { debug!("send() would block"); continue; } return Err(Http3TestError::Other(format!("send() failed: {:?}", e))); } debug!("written {}", write); let req_start = std::time::Instant::now(); loop { if !conn.is_in_early_data() || http3_conn.is_some() { poll.poll(&mut events, conn.timeout()).unwrap(); } // Read incoming UDP packets from the socket and feed them to quiche, // until there are no more packets to read. 'read: loop { // If the event loop reported no events, it means that the timeout // has expired, so handle it without attempting to read packets. We // will then proceed with the send loop. if events.is_empty() { debug!("timed out"); conn.on_timeout(); break 'read; } let (len, from) = match socket.recv_from(&mut buf) { Ok(v) => v, Err(e) => { // There are no more UDP packets to read, so end the read // loop. if e.kind() == std::io::ErrorKind::WouldBlock { debug!("recv() would block"); break 'read; } return Err(Http3TestError::Other(format!( "recv() failed: {:?}", e ))); }, }; debug!("got {} bytes", len); let recv_info = quiche::RecvInfo { from }; // Process potentially coalesced packets. let read = match conn.recv(&mut buf[..len], recv_info) { Ok(v) => v, Err(quiche::Error::Done) => { debug!("done reading"); break; }, Err(e) => { error!("recv failed: {:?}", e); break 'read; }, }; debug!("processed {} bytes", read); } if conn.is_closed() { info!("connection closed, {:?}", conn.stats()); if !conn.is_established() { error!("connection timed out after {:?}", req_start.elapsed(),); return Err(Http3TestError::HandshakeFail); } if reqs_complete != reqs_count { error!("Client timed out after {:?} and only completed {}/{} requests", req_start.elapsed(), reqs_complete, reqs_count); return Err(Http3TestError::HttpFail); } if let Some(session_file) = session_file { if let Some(session) = conn.session() { std::fs::write(session_file, &session).ok(); } } break; } // Create a new HTTP/3 connection and end an HTTP request as soon as // the QUIC connection is established. 
if (conn.is_established() || conn.is_in_early_data()) && http3_conn.is_none() { let h3_config = quiche::h3::Config::new().unwrap(); let mut h3_conn = quiche::h3::Connection::with_transport(&mut conn, &h3_config) .unwrap(); reqs_count = test.requests_count(); match test.send_requests(&mut conn, &mut h3_conn) { Ok(_) => (), Err(quiche::h3::Error::Done) => (), Err(e) => { return Err(Http3TestError::Other(format!( "error sending: {:?}", e ))); }, }; http3_conn = Some(h3_conn); } if let Some(http3_conn) = &mut http3_conn { // Process HTTP/3 events. loop { match http3_conn.poll(&mut conn) { Ok((stream_id, quiche::h3::Event::Headers { list, .. })) => { info!( "got response headers {:?} on stream id {}", hdrs_to_strings(&list), stream_id ); test.add_response_headers(stream_id, &list); }, Ok((stream_id, quiche::h3::Event::Data)) => { if let Ok(read) = http3_conn.recv_body(&mut conn, stream_id, &mut buf) { info!( "got {} bytes of response data on stream {}", read, stream_id ); test.add_response_body(stream_id, &buf, read); } }, Ok((_stream_id, quiche::h3::Event::Finished)) => { reqs_complete += 1; info!( "{}/{} responses received", reqs_complete, reqs_count ); if reqs_complete == reqs_count { info!( "Completed test run. {}/{} response(s) received in {:?}, closing...", reqs_complete, reqs_count, req_start.elapsed() ); match conn.close(true, 0x00, b"kthxbye") { // Already closed. Ok(_) | Err(quiche::Error::Done) => (), Err(e) => { return Err(Http3TestError::Other(format!( "error closing conn: {:?}", e ))); }, } test.assert(); break; } match test.send_requests(&mut conn, http3_conn) { Ok(_) => (), Err(quiche::h3::Error::Done) => (), Err(e) => { return Err(Http3TestError::Other(format!( "error sending request: {:?}", e ))); }, } }, Ok((stream_id, quiche::h3::Event::Reset(e))) => { reqs_complete += 1; info!("request was reset by peer with {}", e); test.set_reset_stream_error(stream_id, e); if reqs_complete == reqs_count { info!( "Completed test run. {}/{} response(s) received in {:?}, closing...", reqs_complete, reqs_count, req_start.elapsed() ); match conn.close(true, 0x00, b"kthxbye") { // Already closed. Ok(_) | Err(quiche::Error::Done) => (), Err(e) => { return Err(Http3TestError::Other(format!( "error closing conn: {:?}",<|fim▁hole|> ))); }, } test.assert(); break; } }, Ok((_flow_id, quiche::h3::Event::Datagram)) => (), Ok((_goaway_id, quiche::h3::Event::GoAway)) => (), Err(quiche::h3::Error::Done) => { break; }, Err(e) => { error!("HTTP/3 processing failed: {:?}", e); break; }, } } } // Generate outgoing QUIC packets and send them on the UDP socket, until // quiche reports that there are no more packets to be sent. 
loop { let (write, send_info) = match conn.send(&mut out) { Ok(v) => v, Err(quiche::Error::Done) => { debug!("done writing"); break; }, Err(e) => { error!("send failed: {:?}", e); conn.close(false, 0x1, b"fail").ok(); break; }, }; if let Err(e) = socket.send_to(&out[..write], &send_info.to) { if e.kind() == std::io::ErrorKind::WouldBlock { debug!("send() would block"); break; } return Err(Http3TestError::Other(format!( "send() failed: {:?}", e ))); } debug!("written {}", write); } if conn.is_closed() { info!("connection closed, {:?}", conn.stats()); if reqs_complete != reqs_count { error!("Client timed out after {:?} and only completed {}/{} requests", req_start.elapsed(), reqs_complete, reqs_count); return Err(Http3TestError::HttpFail); } if let Some(session_file) = session_file { if let Some(session) = conn.session() { std::fs::write(session_file, &session).ok(); } } break; } } Ok(()) } fn hdrs_to_strings(hdrs: &[quiche::h3::Header]) -> Vec<(String, String)> { hdrs.iter() .map(|h| { ( String::from_utf8(h.name().into()).unwrap(), String::from_utf8(h.value().into()).unwrap(), ) }) .collect() }<|fim▁end|>
e
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from django.db.models import signals from django.dispatch import receiver from django.test import TestCase from django.utils import six from .models import Person, Car # #8285: signals can be any callable class PostDeleteHandler(object): def __init__(self, data): self.data = data def __call__(self, signal, sender, instance, **kwargs): self.data.append( (instance, instance.id is None) ) class MyReceiver(object): def __init__(self, param): self.param = param self._run = False def __call__(self, signal, sender, **kwargs): self._run = True signal.disconnect(receiver=self, sender=sender) class SignalTests(TestCase): def test_basic(self): # Save up the number of connected signals so that we can check at the # end that all the signals we register get properly unregistered (#9989) pre_signals = ( len(signals.pre_save.receivers), len(signals.post_save.receivers), len(signals.pre_delete.receivers), len(signals.post_delete.receivers), ) data = [] def pre_save_test(signal, sender, instance, **kwargs): data.append( (instance, kwargs.get("raw", False)) ) signals.pre_save.connect(pre_save_test) def post_save_test(signal, sender, instance, **kwargs): data.append( (instance, kwargs.get("created"), kwargs.get("raw", False)) ) signals.post_save.connect(post_save_test) def pre_delete_test(signal, sender, instance, **kwargs): data.append( (instance, instance.id is None) ) signals.pre_delete.connect(pre_delete_test) post_delete_test = PostDeleteHandler(data) signals.post_delete.connect(post_delete_test) # throw a decorator syntax receiver into the mix @receiver(signals.pre_save) def pre_save_decorator_test(signal, sender, instance, **kwargs): data.append(instance) @receiver(signals.pre_save, sender=Car) def pre_save_decorator_sender_test(signal, sender, instance, **kwargs): data.append(instance) p1 = Person(first_name="John", last_name="Smith") self.assertEqual(data, []) p1.save() self.assertEqual(data, [ (p1, False), p1, (p1, True, False), ]) data[:] = [] p1.first_name = "Tom" p1.save() self.assertEqual(data, [ (p1, False), p1, (p1, False, False), ]) data[:] = [] # Car signal (sender defined) c1 = Car(make="Volkswagon", model="Passat") c1.save() self.assertEqual(data, [ (c1, False), c1, c1, (c1, True, False), ]) data[:] = [] # Calling an internal method purely so that we can trigger a "raw" save. p1.save_base(raw=True) self.assertEqual(data, [ (p1, True), p1, (p1, False, True), ]) data[:] = [] p1.delete() self.assertEqual(data, [ (p1, False), (p1, False), ]) data[:] = [] p2 = Person(first_name="James", last_name="Jones") p2.id = 99999 p2.save() self.assertEqual(data, [ (p2, False), p2, (p2, True, False), ]) data[:] = [] p2.id = 99998 p2.save() self.assertEqual(data, [ (p2, False), p2,<|fim▁hole|> p2.delete() self.assertEqual(data, [ (p2, False), (p2, False) ]) self.assertQuerysetEqual( Person.objects.all(), [ "James Jones", ], six.text_type ) signals.post_delete.disconnect(post_delete_test) signals.pre_delete.disconnect(pre_delete_test) signals.post_save.disconnect(post_save_test) signals.pre_save.disconnect(pre_save_test) signals.pre_save.disconnect(pre_save_decorator_test) signals.pre_save.disconnect(pre_save_decorator_sender_test, sender=Car) # Check that all our signals got disconnected properly. 
post_signals = ( len(signals.pre_save.receivers), len(signals.post_save.receivers), len(signals.pre_delete.receivers), len(signals.post_delete.receivers), ) self.assertEqual(pre_signals, post_signals) def test_disconnect_in_dispatch(self): """ Test that signals that disconnect when being called don't mess future dispatching. """ a, b = MyReceiver(1), MyReceiver(2) signals.post_save.connect(sender=Person, receiver=a) signals.post_save.connect(sender=Person, receiver=b) p = Person.objects.create(first_name='John', last_name='Smith') self.assertTrue(a._run) self.assertTrue(b._run) self.assertEqual(signals.post_save.receivers, [])<|fim▁end|>
            (p2, True, False),
        ])
        data[:] = []
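The tests.py record above drives Django's model signals: a receiver is any callable connected with connect(), optionally restricted to a sender, and it must be removed with disconnect() using the same arguments. A minimal sketch of that pattern, assuming a configured Django project; the handler names are placeholders:

from django.db.models import signals
from django.dispatch import receiver

def log_save(signal, sender, instance, **kwargs):
    print("saved", instance)

signals.post_save.connect(log_save)      # register
signals.post_save.disconnect(log_save)   # must match the connect() arguments

@receiver(signals.pre_save)              # decorator form, as in the record above
def before_save(signal, sender, instance, **kwargs):
    pass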
<|file_name|>0002_auto__add_definedgeography.py<|end_file_name|><|fim▁begin|># encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'DefinedGeography' db.create_table('seak_definedgeography', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=99)), )) db.send_create_signal('seak', ['DefinedGeography']) # Adding M2M table for field planning_units on 'DefinedGeography' db.create_table('seak_definedgeography_planning_units', ( ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), ('definedgeography', models.ForeignKey(orm['seak.definedgeography'], null=False)), ('planningunit', models.ForeignKey(orm['seak.planningunit'], null=False)) )) db.create_unique('seak_definedgeography_planning_units', ['definedgeography_id', 'planningunit_id']) def backwards(self, orm): # Deleting model 'DefinedGeography' db.delete_table('seak_definedgeography') # Removing M2M table for field planning_units on 'DefinedGeography' db.delete_table('seak_definedgeography_planning_units') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 7, 19, 9, 43, 46, 965579)'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 7, 19, 9, 43, 46, 965425)'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': 
"('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'seak.conservationfeature': { 'Meta': {'object_name': 'ConservationFeature'}, 'dbf_fieldname': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), 'level1': ('django.db.models.fields.CharField', [], {'max_length': '99'}), 'level2': ('django.db.models.fields.CharField', [], {'max_length': '99', 'null': 'True', 'blank': 'True'}), 'level3': ('django.db.models.fields.CharField', [], {'max_length': '99', 'null': 'True', 'blank': 'True'}), 'level4': ('django.db.models.fields.CharField', [], {'max_length': '99', 'null': 'True', 'blank': 'True'}), 'level5': ('django.db.models.fields.CharField', [], {'max_length': '99', 'null': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '99'}), 'uid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}), 'units': ('django.db.models.fields.CharField', [], {'max_length': '90', 'null': 'True', 'blank': 'True'}) }, 'seak.cost': { 'Meta': {'object_name': 'Cost'}, 'dbf_fieldname': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), 'desc': ('django.db.models.fields.TextField', [], {}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '99'}), 'uid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}), 'units': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}) }, 'seak.definedgeography': { 'Meta': {'object_name': 'DefinedGeography'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '99'}), 'planning_units': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['seak.PlanningUnit']", 'symmetrical': 'False'}) }, 'seak.folder': { 'Meta': {'object_name': 'Folder'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'seak_folder_related'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}), 'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': "'255'"}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'sharing_groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'seak_folder_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.Group']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'seak_folder_related'", 'to': "orm['auth.User']"}) }, 'seak.planningunit': { 'Meta': {'object_name': 'PlanningUnit'}, 'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'fid': 
('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}), 'geometry': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'srid': '3857', 'null': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '99'}) }, 'seak.planningunitshapes': { 'Meta': {'object_name': 'PlanningUnitShapes'}, 'bests': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'fid': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'geometry': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'srid': '3857', 'null': 'True', 'blank': 'True'}), 'hits': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '99', 'null': 'True'}), 'pu': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['seak.PlanningUnit']"}), 'stamp': ('django.db.models.fields.FloatField', [], {})<|fim▁hole|> 'cf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['seak.ConservationFeature']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'pu': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['seak.PlanningUnit']"}) }, 'seak.puvscost': { 'Meta': {'unique_together': "(('pu', 'cost'),)", 'object_name': 'PuVsCost'}, 'amount': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}), 'cost': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['seak.Cost']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'pu': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['seak.PlanningUnit']"}) }, 'seak.scenario': { 'Meta': {'object_name': 'Scenario'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'seak_scenario_related'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}), 'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'input_geography': ('seak.models.JSONField', [], {}), 'input_penalties': ('seak.models.JSONField', [], {}), 'input_relativecosts': ('seak.models.JSONField', [], {}), 'input_scalefactor': ('django.db.models.fields.FloatField', [], {'default': '0.0'}), 'input_targets': ('seak.models.JSONField', [], {}), 'name': ('django.db.models.fields.CharField', [], {'max_length': "'255'"}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'output_best': ('seak.models.JSONField', [], {'null': 'True', 'blank': 'True'}), 'output_pu_count': ('seak.models.JSONField', [], {'null': 'True', 'blank': 'True'}), 'sharing_groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'seak_scenario_related'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.Group']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'seak_scenario_related'", 'to': "orm['auth.User']"}) } } complete_apps = ['seak']<|fim▁end|>
}, 'seak.puvscf': { 'Meta': {'unique_together': "(('pu', 'cf'),)", 'object_name': 'PuVsCf'}, 'amount': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
<|file_name|>testloader.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding: utf-8 import unittest import sys import os PROJECT_PATH = os.path.sep.join(os.path.abspath(__file__).split(os.path.sep)[:-2]) ROOT_PATH = os.path.dirname(__file__) if __name__ == '__main__': if 'GAE_SDK' in os.environ: SDK_PATH = os.environ['GAE_SDK']<|fim▁hole|> dev_appserver.fix_sys_path() sys.path.append(os.path.join(PROJECT_PATH, 'src')) tests = unittest.TestLoader().discover(ROOT_PATH, "*.py") result = unittest.TextTestRunner().run(tests) if not result.wasSuccessful(): sys.exit(1)<|fim▁end|>
        sys.path.insert(0, SDK_PATH)
        import dev_appserver
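The testloader.py record above relies on stock unittest discovery. The same two calls in isolation, with a placeholder start directory; the exit handling mirrors the record:

import sys
import unittest

tests = unittest.TestLoader().discover("tests", "*.py")   # "tests" is a placeholder path
result = unittest.TextTestRunner().run(tests)
if not result.wasSuccessful():
    sys.exit(1)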
<|file_name|>replace.js.ts<|end_file_name|><|fim▁begin|>import path from 'path'; import fs from 'fs'; import rcs from '../lib'; const fixturesCwd = '__tests__/files/fixtures'; const resultsCwd = '__tests__/files/results'; function replaceJsMacro(input, expected, fillLibrary = fs.readFileSync(path.join(fixturesCwd, '/css/style.css'), 'utf8')): void { rcs.selectorsLibrary.fillLibrary(fillLibrary); rcs.cssVariablesLibrary.fillLibrary(fillLibrary); expect(rcs.replace.js(input)).toBe(expected); expect(rcs.replace.js(Buffer.from(input))).toBe(expected); } beforeEach(() => { rcs.selectorsLibrary.setAlphabet('#abcdefghijklmnopqrstuvwxyz'); rcs.cssVariablesLibrary.setAlphabet('#abcdefghijklmnopqrstuvwxyz'); rcs.selectorsLibrary.reset(); rcs.cssVariablesLibrary.reset(); }); it('replace classes', () => { replaceJsMacro( 'var test = \' something \';\nconst myClass = "jp-block";', 'var test = \' something \';\nconst myClass = "a";', ); }); it('replace nothing on hex', () => { replaceJsMacro( "'\\-.0-9\\u00B7\\u0300-\\u036F\\u203F-\\u2040'", "'\\-.0-9\\u00B7\\u0300-\\u036F\\u203F-\\u2040'", ); }); it('should fail to parse jsx', () => { const input = fs.readFileSync(path.join(fixturesCwd, '/js/react.txt'), 'utf8'); expect(() => { rcs.replace.js(input, { ecmaFeatures: { jsx: false } }); }).toThrow(); }); it('should replace although jsx is disabled', () => { const fillLibrary = fs.readFileSync(path.join(fixturesCwd, '/css/style.css'), 'utf8'); const input = fs.readFileSync(path.join(fixturesCwd, '/js/complex.txt'), 'utf8'); const expected = fs.readFileSync(path.join(resultsCwd, '/js/complex.txt'), 'utf8'); rcs.selectorsLibrary.fillLibrary(fillLibrary); expect(rcs.replace.js(input, { ecmaFeatures: { jsx: false } })).toBe(expected); expect(rcs.replace.js(Buffer.from(input), { ecmaFeatures: { jsx: false } })).toBe(expected); }); it('replace everything from file', () => { replaceJsMacro( fs.readFileSync(path.join(fixturesCwd, '/js/complex.txt'), 'utf8'), fs.readFileSync(path.join(resultsCwd, '/js/complex.txt'), 'utf8'), ); }); it('replace react components', () => { replaceJsMacro( fs.readFileSync(path.join(fixturesCwd, '/js/react.txt'), 'utf8'), fs.readFileSync(path.join(resultsCwd, '/js/react.txt'), 'utf8'), ); }); it('replace escaped prefixes | issue #67', () => { replaceJsMacro( 'var test = "something:withPrefix";\nconst myClass = "jp-block";', 'var test = "a";\nconst myClass = "b";', '.something\\:withPrefix:after{} .jp-block{}', ); }); it('check optional try catch | issue #73', () => { replaceJsMacro( ` try { const selector = "jp-block"; } catch { const selector = "jp-block-two"; } `, ` try { const selector = "a"; } catch { const selector = "b"; } `, '.jp-block{}.jp-block-two{}', ); }); it('check "key" in object non replacement | issue #83', () => { replaceJsMacro( ` const key = "jp-block" in obj; `, ` const key = "jp-block" in obj; `, '.jp-block{}', ); }); it('replace in template | issue #84', () => { replaceJsMacro( 'const templ = `<div class="jp-block" id="someid">`;', 'const templ = `<div class="a" id="a">`;', '.jp-block{}#someid{}', ); }); it('replace in template | more complex', () => { replaceJsMacro( 'const templ = `<div class="jp-block jp-pseudo" id="someid">`;', 'const templ = `<div class="a b" id="a">`;', '.jp-block{}.jp-pseudo{}#someid{}', ); }); it('replace in template | with class for id', () => { replaceJsMacro( 'const templ = `<div class="jp-block jp-pseudo" id="jp-block">`;', 'const templ = `<div class="a b" id="jp-block">`;', '.jp-block{}.jp-pseudo{}#someid{}', ); }); it('replace in 
template | with id for class', () => { replaceJsMacro( 'const templ = `<div class="someid jp-pseudo" id="someid">`;', 'const templ = `<div class="someid b" id="a">`;', '.jp-block{}.jp-pseudo{}#someid{}', ); }); it('replace css variables | issue rename-css-selectors#38', () => { replaceJsMacro( ` const defaultProps = { secondary: false, offset: "var(--header-height)", }; `, ` const defaultProps = { secondary: false, offset: "var(--a)", }; `, ':root { --header-height: #7EA }', ); }); it('should add no conflict with jsx enabled', () => { replaceJsMacro(<|fim▁hole|> `, ` const text = 'This has no conflicts'; `, '.has { content: "nothing" } .no {} .conflicts { content: "at all" }', ); }); it('should add no conflicts on jsx with jsx enabled', () => { replaceJsMacro( ` <div class="this has conflicts"> This has no conflicts </div> `, ` <div class="this a c"> This has no conflicts </div> `, '.has { content: "nothing" } .no {} .conflicts { content: "at all" }', ); });<|fim▁end|>
` const text = 'This has no conflicts';
<|file_name|>no_pivot_ldl_test.py<|end_file_name|><|fim▁begin|># Copyright 2021 The TensorFlow Probability Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Tests for no_pivot_ldl.""" import numpy as np import tensorflow.compat.v2 as tf from tensorflow_probability.python.experimental.linalg.no_pivot_ldl import no_pivot_ldl from tensorflow_probability.python.experimental.linalg.no_pivot_ldl import simple_robustified_cholesky from tensorflow_probability.python.internal import test_util @test_util.test_all_tf_execution_regimes class NoPivotLDLTest(test_util.TestCase): def _randomDiag(self, n, batch_shape, low, high, forcemin=None, seed=42): np.random.seed(seed) shape = batch_shape + [n] diag = np.random.uniform(low, high, size=shape) if forcemin: assert forcemin < low diag = np.where(diag == np.min(diag, axis=-1)[..., np.newaxis], forcemin, diag) return diag def _randomTril(self, n, batch_shape, seed=42): np.random.seed(seed) unit_tril = np.random.standard_normal(batch_shape + [n, n]) unit_tril = np.tril(unit_tril) unit_tril[..., range(n), range(n)] = 1. return unit_tril def _randomSymmetricMatrix(self, n, batch_shape, low, high, forcemin=None, seed=42): diag = self._randomDiag(n, batch_shape, low, high, forcemin, seed) unit_tril = self._randomTril(n, batch_shape, seed)<|fim▁hole|> def testLDLRandomPSD(self): matrix = self._randomSymmetricMatrix( 10, [2, 1, 3], 1e-6, 10., forcemin=0., seed=42) left, diag = self.evaluate(no_pivot_ldl(matrix)) reconstruct = np.einsum('...ij,...j,...kj->...ik', left, diag, left) self.assertAllClose(matrix, reconstruct) def testLDLIndefinite(self): matrix = [[1., 2.], [2., 1.]] left, diag = self.evaluate(no_pivot_ldl(matrix)) reconstruct = np.einsum('...ij,...j,...kj->...ik', left, diag, left) self.assertAllClose(matrix, reconstruct) def testSimpleIsCholeskyRandomPD(self): matrix = self._randomSymmetricMatrix(10, [2, 1, 3], 1e-6, 10., seed=42) chol, left = self.evaluate( (tf.linalg.cholesky(matrix), simple_robustified_cholesky(matrix))) self.assertAllClose(chol, left) def testSimpleIndefinite(self): matrix = [[1., 2.], [2., 1.]] left = self.evaluate( simple_robustified_cholesky(matrix, tol=.1)) reconstruct = np.einsum('...ij,...kj->...ik', left, left) eigv, _ = self.evaluate(tf.linalg.eigh(reconstruct)) self.assertAllTrue(eigv > 0.) 
def testXlaCompileBug(self): inp = tf.Variable([[2., 1.], [1., 2.]]) self.evaluate(inp.initializer) alt_chol = simple_robustified_cholesky alt_chol_nojit = tf.function(alt_chol, autograph=False, jit_compile=False) alt_chol_jit = tf.function(alt_chol, autograph=False, jit_compile=True) answer = np.array([[1.4142135, 0.], [0.70710677, 1.2247449]]) self.assertAllClose(self.evaluate(alt_chol(inp)), answer) self.assertAllClose(self.evaluate(alt_chol_nojit(inp)), answer) self.assertAllClose(self.evaluate(alt_chol_jit(inp)), answer) with tf.GradientTape(): chol_with_grad = alt_chol(inp) chol_nojit_with_grad = alt_chol_nojit(inp) # Not supported by TF-XLA (WAI), see b/193584244 # chol_jit_with_grad = alt_chol_jit(inp) self.assertAllClose(self.evaluate(chol_with_grad), answer) self.assertAllClose(self.evaluate(chol_nojit_with_grad), answer) # But wrapping the tape in tf.function should work. @tf.function(autograph=False, jit_compile=True) def jit_with_grad(mat): with tf.GradientTape(): return alt_chol_jit(mat) self.assertAllClose(self.evaluate(jit_with_grad(inp)), answer) if __name__ == '__main__': test_util.main()<|fim▁end|>
return np.einsum('...ij,...j,...kj->...ik', unit_tril, diag, unit_tril)
<|file_name|>test_restore.py<|end_file_name|><|fim▁begin|># Back In Time # Copyright (C) 2008-2017 Oprea Dan, Bart de Koning, Richard Bailey, Germar Reitze # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along<|fim▁hole|> import os import sys import unittest import pwd import grp import stat from tempfile import TemporaryDirectory from test import generic sys.path.append(os.path.join(os.path.dirname(__file__), '..')) import config import snapshots import mount CURRENTUID = os.geteuid() CURRENTUSER = pwd.getpwuid(CURRENTUID).pw_name CURRENTGID = os.getegid() CURRENTGROUP = grp.getgrgid(CURRENTGID).gr_name class RestoreTestCase(generic.SnapshotsWithSidTestCase): def setUp(self): super(RestoreTestCase, self).setUp() self.include = TemporaryDirectory() generic.create_test_files(self.sid.pathBackup(self.include.name)) def tearDown(self): super(RestoreTestCase, self).tearDown() self.include.cleanup() def prepairFileInfo(self, restoreFile, mode = 33260): d = self.sid.fileInfo d[restoreFile.encode('utf-8', 'replace')] = (mode, CURRENTUSER.encode('utf-8', 'replace'), CURRENTGROUP.encode('utf-8', 'replace')) self.sid.fileInfo = d class TestRestore(RestoreTestCase): def test_restore_multiple_files(self): restoreFile1 = os.path.join(self.include.name, 'test') self.prepairFileInfo(restoreFile1) restoreFile2 = os.path.join(self.include.name, 'foo', 'bar', 'baz') self.prepairFileInfo(restoreFile2) self.sn.restore(self.sid, (restoreFile1, restoreFile2)) self.assertIsFile(restoreFile1) with open(restoreFile1, 'rt') as f: self.assertEqual(f.read(), 'bar') self.assertEqual(33260, os.stat(restoreFile1).st_mode) self.assertIsFile(restoreFile2) with open(restoreFile2, 'rt') as f: self.assertEqual(f.read(), 'foo') self.assertEqual(33260, os.stat(restoreFile2).st_mode) def test_restore_to_different_destination(self): restoreFile = os.path.join(self.include.name, 'test') self.prepairFileInfo(restoreFile) with TemporaryDirectory() as dest: destRestoreFile = os.path.join(dest, 'test') self.sn.restore(self.sid, restoreFile, restore_to = dest) self.assertIsFile(destRestoreFile) with open(destRestoreFile, 'rt') as f: self.assertEqual(f.read(), 'bar') self.assertEqual(33260, os.stat(destRestoreFile).st_mode) def test_restore_folder_to_different_destination(self): restoreFolder = self.include.name self.prepairFileInfo(restoreFolder) self.prepairFileInfo(os.path.join(restoreFolder, 'test')) self.prepairFileInfo(os.path.join(restoreFolder, 'file with spaces')) with TemporaryDirectory() as dest: destRestoreFile = os.path.join(dest, os.path.basename(restoreFolder), 'test') self.sn.restore(self.sid, restoreFolder, restore_to = dest) self.assertIsFile(destRestoreFile) with open(destRestoreFile, 'rt') as f: self.assertEqual(f.read(), 'bar') self.assertEqual(33260, os.stat(destRestoreFile).st_mode) def test_delete(self): restoreFolder = self.include.name junkFolder = os.path.join(self.include.name, 'junk') os.makedirs(junkFolder) self.assertExists(junkFolder) self.prepairFileInfo(restoreFolder) self.sn.restore(self.sid, 
restoreFolder, delete = True) self.assertIsFile(restoreFolder, 'test') self.assertNotExists(junkFolder) def test_backup(self): restoreFile = os.path.join(self.include.name, 'test') self.prepairFileInfo(restoreFile) with open(restoreFile, 'wt') as f: f.write('fooooooooooooooooooo') self.sn.restore(self.sid, restoreFile, backup = True) self.assertIsFile(restoreFile) with open(restoreFile, 'rt') as f: self.assertEqual(f.read(), 'bar') backupFile = restoreFile + self.sn.backupSuffix() self.assertIsFile(backupFile) with open(backupFile, 'rt') as f: self.assertEqual(f.read(), 'fooooooooooooooooooo') def test_no_backup(self): restoreFile = os.path.join(self.include.name, 'test') self.prepairFileInfo(restoreFile) with open(restoreFile, 'wt') as f: f.write('fooooooooooooooooooo') self.sn.restore(self.sid, restoreFile, backup = False) self.assertIsFile(restoreFile) with open(restoreFile, 'rt') as f: self.assertEqual(f.read(), 'bar') backupFile = restoreFile + self.sn.backupSuffix() self.assertIsNoFile(backupFile) def test_only_new(self): restoreFile = os.path.join(self.include.name, 'test') self.prepairFileInfo(restoreFile) with open(restoreFile, 'wt') as f: f.write('fooooooooooooooooooo') # change mtime to be newer than the one in snapshot st = os.stat(restoreFile) atime = st[stat.ST_ATIME] mtime = st[stat.ST_MTIME] new_mtime = mtime + 3600 os.utime(restoreFile, (atime, new_mtime)) self.sn.restore(self.sid, restoreFile, only_new = True) self.assertIsFile(restoreFile) with open(restoreFile, 'rt') as f: self.assertEqual(f.read(), 'fooooooooooooooooooo') class TestRestoreLocal(RestoreTestCase): """ Tests which should run on local and ssh profile """ def test_restore(self): restoreFile = os.path.join(self.include.name, 'test') self.prepairFileInfo(restoreFile) self.sn.restore(self.sid, restoreFile) self.assertIsFile(restoreFile) with open(restoreFile, 'rt') as f: self.assertEqual(f.read(), 'bar') self.assertEqual(33260, os.stat(restoreFile).st_mode) def test_restore_file_with_spaces(self): restoreFile = os.path.join(self.include.name, 'file with spaces') self.prepairFileInfo(restoreFile) self.sn.restore(self.sid, restoreFile) self.assertIsFile(restoreFile) with open(restoreFile, 'rt') as f: self.assertEqual(f.read(), 'asdf') self.assertEqual(33260, os.stat(restoreFile).st_mode) @unittest.skipIf(not generic.LOCAL_SSH, 'Skip as this test requires a local ssh server, public and private keys installed') class TestRestoreSSH(generic.SSHSnapshotsWithSidTestCase, TestRestoreLocal): def setUp(self): super(TestRestoreSSH, self).setUp() self.include = TemporaryDirectory() generic.create_test_files(os.path.join(self.remoteSIDBackupPath, self.include.name[1:])) #mount self.cfg.setCurrentHashId(mount.Mount(cfg = self.cfg).mount()) def tearDown(self): #unmount mount.Mount(cfg = self.cfg).umount(self.cfg.current_hash_id) super(TestRestoreSSH, self).tearDown() self.include.cleanup()<|fim▁end|>
# with this program; if not, write to the Free Software Foundation,Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
<|file_name|>create_response.js<|end_file_name|><|fim▁begin|>App.CreateResponseView = Ember.View.extend({ templateName: "studentapp/create_response",<|fim▁hole|> didInsertElement: function() { this.get('controller').send('loadFields'); }.observes('controller.model') });<|fim▁end|>
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Cinder OS API WSGI application.""" import sys<|fim▁hole|>from jacket.objects import storage warnings.simplefilter('once', DeprecationWarning) from oslo_config import cfg from oslo_log import log as logging from oslo_service import wsgi from jacket.storage import i18n i18n.enable_lazy() # Need to register global_opts from jacket.common.storage import config from jacket import rpc from jacket.storage import version CONF = cfg.CONF def initialize_application(): storage.register_all() CONF(sys.argv[1:], project='storage', version=version.version_string()) logging.setup(CONF, "storage") config.set_middleware_defaults() rpc.init(CONF) return wsgi.Loader(CONF).load_app(name='osapi_volume')<|fim▁end|>
import warnings
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main import ( "archive/zip" "bitbucket.org/tebeka/nrsc" "fmt" "github.com/remyoudompheng/go-misc/zipfs" "html" "log" "net/http" "os" "strings" "text/template" ) func checkinputdir() { if _, err := os.Stat("./dzi"); os.IsNotExist(err) { fmt.Println("-- missing dzi directory, creating.") os.Mkdir("./dzi", 0775) return } } func dzi(res http.ResponseWriter, req *http.Request) { dzi := "dzi/" + strings.Split(html.EscapeString(req.URL.Path), "/")[2] + ".zip" z, err := zip.OpenReader(dzi) if err != nil { http.Error(res, err.Error(), 404) return } defer z.Close() http.StripPrefix("/dzi/", http.FileServer(zipfs.NewZipFS(&z.Reader))).ServeHTTP(res, req) } var templ = template.Must(template.New("index").Parse(indexTemplate)) func view(res http.ResponseWriter, req *http.Request) {<|fim▁hole|> func main() { checkinputdir() nrsc.Handle("/openseadragon/") http.HandleFunc("/dzi/", dzi) http.HandleFunc("/view/", view) fmt.Println("-- running on http://127.0.0.1:8080") log.Fatal(http.ListenAndServe(":8080", nil)) } const indexTemplate = ` <html> <head><title>deepzoom-server {{.}}</title></head> <body> <div id="openseadragon1" style="width: 1024px; height: 768px;"></div> <script src="/openseadragon/openseadragon.min.js"></script> <script type="text/javascript"> var viewer = OpenSeadragon({ id: "openseadragon1", prefixUrl: "/openseadragon/images/", tileSources: "{{.}}" }); </script> </body> </html> `<|fim▁end|>
dziID := strings.Split(html.EscapeString(req.URL.Path), "/")[2] dzi := "/dzi/" + dziID + "/" + dziID + ".dzi" templ.Execute(res, dzi) }
<|file_name|>attr_iterator.rs<|end_file_name|><|fim▁begin|>// Copyright 2018, The Gtk-rs Project Developers. // See the COPYRIGHT file at the top-level directory of this distribution. // Licensed under the MIT license, see the LICENSE file or <https://opensource.org/licenses/MIT> use glib::translate::*; use glib_sys; use pango_sys; use AttrIterator; use Attribute; use FontDescription; use Language;<|fim▁hole|> use std::ptr; impl AttrIterator { pub fn get_font( &mut self, desc: &mut FontDescription, language: Option<&Language>, extra_attrs: &[&Attribute], ) { unsafe { let stash_vec: Vec<_> = extra_attrs.iter().rev().map(|v| v.to_glib_none()).collect(); let mut list: *mut glib_sys::GSList = ptr::null_mut(); for stash in &stash_vec { list = glib_sys::g_slist_prepend(list, Ptr::to(stash.0)); } pango_sys::pango_attr_iterator_get_font( self.to_glib_none_mut().0, desc.to_glib_none_mut().0, &mut language.to_glib_none().0, &mut list, ); } } }<|fim▁end|>
<|file_name|>wait_condition.rs<|end_file_name|><|fim▁begin|>use alloc::arc::Arc; use collections::Vec; use spin::{Mutex, RwLock}; use context::{self, Context}; #[derive(Debug)] pub struct WaitCondition { contexts: Mutex<Vec<Arc<RwLock<Context>>>> } impl WaitCondition { pub fn new() -> WaitCondition { WaitCondition { contexts: Mutex::new(Vec::with_capacity(16)) } } pub fn notify(&self) -> usize { let mut contexts = self.contexts.lock(); let len = contexts.len(); while let Some(context_lock) = contexts.pop() { context_lock.write().unblock(); }<|fim▁hole|> { let context_lock = { let contexts = context::contexts(); let context_lock = contexts.current().expect("WaitCondition::wait: no context"); context_lock.clone() }; context_lock.write().block(); self.contexts.lock().push(context_lock); } unsafe { context::switch(); } } } impl Drop for WaitCondition { fn drop(&mut self){ self.notify(); } }<|fim▁end|>
len } pub fn wait(&self) {
<|file_name|>BendpointSnapping.js<|end_file_name|><|fim▁begin|>import { assign, forEach, isArray } from 'min-dash'; var abs= Math.abs, round = Math.round; var TOLERANCE = 10; export default function BendpointSnapping(eventBus) { function snapTo(values, value) { if (isArray(values)) { var i = values.length; while (i--) if (abs(values[i] - value) <= TOLERANCE) { return values[i]; } } else { values = +values; var rem = value % values; if (rem < TOLERANCE) { return value - rem; } if (rem > values - TOLERANCE) { return value - rem + values; } } return value; } function mid(element) { if (element.width) { return { x: round(element.width / 2 + element.x), y: round(element.height / 2 + element.y) }; } } // connection segment snapping ////////////////////// function getConnectionSegmentSnaps(context) { <|fim▁hole|> waypoints = connection.waypoints, segmentStart = context.segmentStart, segmentStartIndex = context.segmentStartIndex, segmentEnd = context.segmentEnd, segmentEndIndex = context.segmentEndIndex, axis = context.axis; if (snapPoints) { return snapPoints; } var referenceWaypoints = [ waypoints[segmentStartIndex - 1], segmentStart, segmentEnd, waypoints[segmentEndIndex + 1] ]; if (segmentStartIndex < 2) { referenceWaypoints.unshift(mid(connection.source)); } if (segmentEndIndex > waypoints.length - 3) { referenceWaypoints.unshift(mid(connection.target)); } context.snapPoints = snapPoints = { horizontal: [] , vertical: [] }; forEach(referenceWaypoints, function(p) { // we snap on existing bendpoints only, // not placeholders that are inserted during add if (p) { p = p.original || p; if (axis === 'y') { snapPoints.horizontal.push(p.y); } if (axis === 'x') { snapPoints.vertical.push(p.x); } } }); return snapPoints; } eventBus.on('connectionSegment.move.move', 1500, function(event) { var context = event.context, snapPoints = getConnectionSegmentSnaps(context), x = event.x, y = event.y, sx, sy; if (!snapPoints) { return; } // snap sx = snapTo(snapPoints.vertical, x); sy = snapTo(snapPoints.horizontal, y); // correction x/y var cx = (x - sx), cy = (y - sy); // update delta assign(event, { dx: event.dx - cx, dy: event.dy - cy, x: sx, y: sy }); }); // bendpoint snapping ////////////////////// function getBendpointSnaps(context) { var snapPoints = context.snapPoints, waypoints = context.connection.waypoints, bendpointIndex = context.bendpointIndex; if (snapPoints) { return snapPoints; } var referenceWaypoints = [ waypoints[bendpointIndex - 1], waypoints[bendpointIndex + 1] ]; context.snapPoints = snapPoints = { horizontal: [] , vertical: [] }; forEach(referenceWaypoints, function(p) { // we snap on existing bendpoints only, // not placeholders that are inserted during add if (p) { p = p.original || p; snapPoints.horizontal.push(p.y); snapPoints.vertical.push(p.x); } }); return snapPoints; } eventBus.on('bendpoint.move.move', 1500, function(event) { var context = event.context, snapPoints = getBendpointSnaps(context), target = context.target, targetMid = target && mid(target), x = event.x, y = event.y, sx, sy; if (!snapPoints) { return; } // snap sx = snapTo(targetMid ? snapPoints.vertical.concat([ targetMid.x ]) : snapPoints.vertical, x); sy = snapTo(targetMid ? snapPoints.horizontal.concat([ targetMid.y ]) : snapPoints.horizontal, y); // correction x/y var cx = (x - sx), cy = (y - sy); // update delta assign(event, { dx: event.dx - cx, dy: event.dy - cy, x: event.x - cx, y: event.y - cy }); }); } BendpointSnapping.$inject = [ 'eventBus' ];<|fim▁end|>
var snapPoints = context.snapPoints, connection = context.connection,
<|file_name|>component-names-generator.ts<|end_file_name|><|fim▁begin|>import fs = require('fs'); import path = require('path'); export default class ComponentNamesGenerator { private _encoding = 'utf8'; private _config; constructor(config) { this._config = config;<|fim▁hole|> return ` '${fileName.replace('.ts', '')}'`; } validateFileName(fileName: string) { return this._config.excludedFileNames.indexOf(fileName) < 0; } generate() { let directoryPath = this._config.componentFilesPath; let files = fs.readdirSync(directoryPath); let fileList = files .filter(fileName => !fs.lstatSync(path.join(directoryPath, fileName)).isFile() && this.validateFileName(fileName)) .map(fileName => this.prepareTagName(fileName)) .join(',\n'); let resultContent = `export const componentNames = [\n${fileList}\n];\n`; fs.writeFileSync(this._config.outputFileName, resultContent, { encoding: this._encoding }); } };<|fim▁end|>
} prepareTagName(fileName: string) {
<|file_name|>WrapperClassBean.java<|end_file_name|><|fim▁begin|>/* * $Id: WrapperClassBean.java 799110 2009-07-29 22:44:26Z musachy $ * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.struts2.json; import java.util.List; import java.util.Map; public class WrapperClassBean { private String stringField; private Integer intField; private int nullIntField; private Boolean booleanField; private boolean primitiveBooleanField1;<|fim▁hole|> private boolean primitiveBooleanField3; private Character charField; private Long longField; private Float floatField; private Double doubleField; private Object objectField; private Byte byteField; private List<SimpleValue> listField; private List<Map<String, Long>> listMapField; private Map<String, List<Long>> mapListField; private Map<String, Long>[] arrayMapField; public List<SimpleValue> getListField() { return listField; } public void setListField(List<SimpleValue> listField) { this.listField = listField; } public List<Map<String, Long>> getListMapField() { return listMapField; } public void setListMapField(List<Map<String, Long>> listMapField) { this.listMapField = listMapField; } public Map<String, List<Long>> getMapListField() { return mapListField; } public void setMapListField(Map<String, List<Long>> mapListField) { this.mapListField = mapListField; } public Map<String, Long>[] getArrayMapField() { return arrayMapField; } public void setArrayMapField(Map<String, Long>[] arrayMapField) { this.arrayMapField = arrayMapField; } public Boolean getBooleanField() { return booleanField; } public void setBooleanField(Boolean booleanField) { this.booleanField = booleanField; } public boolean isPrimitiveBooleanField1() { return primitiveBooleanField1; } public void setPrimitiveBooleanField1(boolean primitiveBooleanField1) { this.primitiveBooleanField1 = primitiveBooleanField1; } public boolean isPrimitiveBooleanField2() { return primitiveBooleanField2; } public void setPrimitiveBooleanField2(boolean primitiveBooleanField2) { this.primitiveBooleanField2 = primitiveBooleanField2; } public boolean isPrimitiveBooleanField3() { return primitiveBooleanField3; } public void setPrimitiveBooleanField3(boolean primitiveBooleanField3) { this.primitiveBooleanField3 = primitiveBooleanField3; } public Byte getByteField() { return byteField; } public void setByteField(Byte byteField) { this.byteField = byteField; } public Character getCharField() { return charField; } public void setCharField(Character charField) { this.charField = charField; } public Double getDoubleField() { return doubleField; } public void setDoubleField(Double doubleField) { this.doubleField = doubleField; } public Float getFloatField() { return floatField; } public void setFloatField(Float floatField) { this.floatField = floatField; } public Integer getIntField() { 
return intField; } public void setIntField(Integer intField) { this.intField = intField; } public int getNullIntField() { return nullIntField; } public void setNullIntField(int nullIntField) { this.nullIntField = nullIntField; } public Long getLongField() { return longField; } public void setLongField(Long longField) { this.longField = longField; } public Object getObjectField() { return objectField; } public void setObjectField(Object objectField) { this.objectField = objectField; } public String getStringField() { return stringField; } public void setStringField(String stringField) { this.stringField = stringField; } }<|fim▁end|>
private boolean primitiveBooleanField2;
<|file_name|>Database.js<|end_file_name|><|fim▁begin|>/* * This file is part of the easy framework. * * (c) Julien Sergent <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ const ConfigLoader = require( 'easy/core/ConfigLoader' ) const EventsEmitter = require( 'events' ) /** * @class Database */ class Database { /** * @constructor */ constructor( config ) { this._config = config this.stateEmitter = new EventsEmitter() this.name = config.config.name this.init() } /** * init - init attributes */ init() { this._instance = null this._connector = this._config.connector this.resetProperties() } /** * Reset instance and connected state * * @memberOf Database */ resetProperties() { this._connected = false this._connectionError = null } /** * load - load database config */ async start() { await this.connect() } /** * restart - restart database component */ async restart() { this.resetProperties() await this.start() } /** * connect - connect database to instance */ async connect() { const { instance, connected, error } = await this.config.connector() const oldConnected = this.connected this.instance = instance this.connected = connected this.connectionError = error if ( this.connected !== oldConnected ) { this.stateEmitter.emit( 'change', this.connected ) } if ( error ) { throw new Error( error ) } } /** * Reset database connection and instance * * @memberOf Database */ disconnect() { const oldConnected = this.connected this.resetProperties() if ( this.connected !== oldConnected ) { this.stateEmitter.emit( 'change', this.connected ) } } /** * verifyConnectionHandler - handler called by daemon which indicates if database still available or not * * @returns {Promise} * * @memberOf Database */ verifyConnectionHandler() { return this.config.verifyConnectionHandler() } /** * Branch handler on database state events * * @param {Function} handler * * @memberOf Database */ connectToStateEmitter( handler ) { this.stateEmitter.on( 'change', handler ) } /** * get - get database instance * * @returns {Object} */ get instance() { return this._instance } /** * set - set database instance * * @param {Object} instance * @returns {Object} */ set instance( instance ) { this._instance = instance return this._instance } /** * get - get database connection state * * @returns {Object} */ get connected() { return this._connected } /** * set - set database connection state * * @param {boolean} connected * @returns {Database} */ set connected( connected ) { this._connected = connected return this } /**<|fim▁hole|> * @returns {Object} */ get config() { return this._config } /** * Get connection error * * @readonly * * @memberOf Database */ get connectionError() { return this._connectionError } /** * Set connection error * * @returns {Database} * * @memberOf Database */ set connectionError( error ) { this._connectionError = error return this } } module.exports = Database<|fim▁end|>
* get - get database configurations *
<|file_name|>assert0.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python -tt<|fim▁hole|> """Use a descriptive macro instead of assert(false);""" error_msg = 'Use NEVER_HERE() from base/macros.h here.' regexp = r"""assert *\( *(0|false) *\)""" forbidden = [ 'assert(0)', 'assert(false)', ] allowed = [ 'NEVER_HERE()', ]<|fim▁end|>
# encoding: utf-8 #
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import include from django.contrib import admin from django.urls import path app_name = "transmission" <|fim▁hole|>urlpatterns = [ path("admin/", admin.site.urls), path("torrents/", include("torrents.urls", namespace="torrents")), path("shows/", include("shows.urls", namespace="shows")), ]<|fim▁end|>
<|file_name|>test_source.py<|end_file_name|><|fim▁begin|>from blueice.test_helpers import * from blueice.model import Model def test_mcsource(): conf = test_conf(mc=True) m = Model(conf) s = m.sources[0] bins = conf['analysis_space'][0][1] assert s.events_per_day == 1000 assert s.fraction_in_range > 0.9999 # Ten sigma events happen sometimes.. assert abs(s.pdf([0]) - stats.norm.pdf(0)) < 0.01 # Verify linear interpolation<|fim▁hole|><|fim▁end|>
assert (s.pdf([bins[0]]) + s.pdf([bins[1]])) / 2 == s.pdf([(bins[0] + bins[1])/2])
<|file_name|>create_venv.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ The script can be used to setup a virtual environment for running Firefox UI Tests. It will automatically install the firefox ui test package, all its dependencies, and optional packages if specified. """ import argparse import os import shutil import subprocess import sys import urllib2 import zipfile # Link to the folder, which contains the zip archives of virtualenv VIRTUALENV_URL = 'https://github.com/pypa/virtualenv/archive/%(VERSION)s.zip' VIRTUALENV_VERSION = '12.1.1' here = os.path.dirname(os.path.abspath(__file__)) venv_script_path = 'Scripts' if sys.platform == 'win32' else 'bin' venv_activate = os.path.join(venv_script_path, 'activate') venv_activate_this = os.path.join(venv_script_path, 'activate_this.py') venv_python_bin = os.path.join(venv_script_path, 'python') usage_message = """ *********************************************************************** To run the Firefox UI Tests, activate the virtual environment: {}{} See firefox-ui-tests --help for all options *********************************************************************** """ def download(url, target): """Downloads the specified url to the given target.""" response = urllib2.urlopen(url) with open(target, 'wb') as f: f.write(response.read()) return target def create_virtualenv(target, python_bin=None): script_path = os.path.join(here, 'virtualenv-%s' % VIRTUALENV_VERSION, 'virtualenv.py') print 'Downloading virtualenv %s' % VIRTUALENV_VERSION zip_path = download(VIRTUALENV_URL % {'VERSION': VIRTUALENV_VERSION}, os.path.join(here, 'virtualenv.zip')) try: with zipfile.ZipFile(zip_path, 'r') as f: f.extractall(here) print 'Creating new virtual environment' cmd_args = [sys.executable, script_path, target] if python_bin: cmd_args.extend(['-p', python_bin]) subprocess.check_call(cmd_args) finally: try: os.remove(zip_path) except OSError: pass shutil.rmtree(os.path.dirname(script_path), ignore_errors=True) def main(): parser = argparse.ArgumentParser() parser.add_argument('-p', '--python', dest='python', metavar='BINARY', help='The Python interpreter to use.') parser.add_argument('venv', metavar='PATH', help='Path to the environment to be created.') args = parser.parse_args() # Remove an already existent virtual environment if os.path.exists(args.venv): print 'Removing already existent virtual environment at: %s' % args.venv shutil.rmtree(args.venv, True) create_virtualenv(args.venv, python_bin=args.python) # Activate the environment venv = os.path.join(args.venv, venv_activate_this) execfile(venv, dict(__file__=venv)) # Install Firefox UI tests, dependencies and optional packages command = ['pip', 'install', '-r', 'requirements.txt', '-r', 'requirements_optional.txt', ]<|fim▁hole|> subprocess.check_call(command, cwd=os.path.dirname(here)) # Print the user instructions print usage_message.format('' if sys.platform == 'win32' else 'source ', os.path.join(args.venv, venv_activate)) if __name__ == "__main__": main()<|fim▁end|>
print 'Installing Firefox UI Tests and dependencies...' print 'Command: %s' % command
<|file_name|>implementation.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ''' Created on Nov 14, 2014 Implementation of a hand speed-based segmentation module @author: Arturo Curiel ''' import zope.interface as zi try: import magic except:<|fim▁hole|> from nixtla.segmentation.handspeed_based.adapters import FromTextTracking from nixtla.segmentation.handspeed_based.markers import IHandSpeedBasedSegments from nixtla.segmentation.handspeed_based.tools import IntervalList class SegmentationModule(BaseModule): """Hand speed-based segmentation""" zi.implements(ISegmentationModule) def __init__(self, **args): try: self.analysis_window = int(args['analysis_window']) except: self.analysis_window = 5 try: self.speed_threshold = int(args['speed_threshold']) except: self.speed_threshold = 6.0 try: self.articulators = args['articulators'].\ replace('[','').\ replace(']','').\ replace(' ','').\ split(",") except: self.articulators = ['right_hand', 'left_hand'] self.numeric_data = None # Register adapters self.register_module_adapters(FromTextTracking) self.interval_list = IntervalList(self.speed_threshold, articulators=self.articulators, driver=self.articulators[0]) super(SegmentationModule, self).__init__(IHandSpeedBasedSegments) def callable(self, input_data): signer_id, numeric_data = input_data self.numeric_data = numeric_data for i in range(len(numeric_data)-1): row = numeric_data[i:i+1] # We get each information row and pass it to determine where in # the segmentation it belongs for articulator in self.articulators: results = self.interval_list.\ include_in_articulator_interval_2window( articulator, row) if results: for result in results: self.send_to_channels((signer_id, {articulator:result} )) return True def check_input_compliance(self, input_data): """Assert that input_data is segmentable""" # Check that we are still passing a valid # video file and an id assert len(input_data) == 2 assert "str" in str(type(input_data[0])) # Check that we are passing speeds, the only # measure truly needed to calculate segments for articulator in self.articulators: assert not input_data[1][articulator+'_v'].empty<|fim▁end|>
import nixtla.core.tools.magic_win as magic from nixtla.core.base_module import BaseModule from nixtla.segmentation.interface import ISegmentationModule
<|file_name|>utils_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Serviced Authors. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|> // +build unit // Package agent implements a service that runs on a serviced node. It is // responsible for ensuring that a particular node is running the correct services // and reporting the state and health of those services back to the master // serviced. package utils import ( "testing" ) // Test GetIPv4Addresses() func TestGetIPv4Addresses(t *testing.T) { ips, err := GetIPv4Addresses() if err != nil { t.Errorf("Failed to get ipv4 addresses: %s", err) t.Fail() } expectedMinimumLen := 1 if len(ips) < expectedMinimumLen { t.Errorf("minimum IPs expected %d > retrieved %d ips:%v", expectedMinimumLen, len(ips), ips) t.Fail() } }<|fim▁end|>
// See the License for the specific language governing permissions and // limitations under the License.
<|file_name|>tools.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # vim: ai ts=4 sts=4 et sw=4 nu from __future__ import (unicode_literals, absolute_import, division, print_function) import re import unicodedata import datetime import subprocess from py3compat import string_types, text_type from django.utils import timezone from django.conf import settings from uninond.models.SMSMessages import SMSMessage # default country prefix COUNTRY_PREFIX = getattr(settings, 'COUNTRY_PREFIX', 223) ALL_COUNTRY_CODES = [1242, 1246, 1264, 1268, 1284, 1340, 1345, 1441, 1473, 1599, 1649, 1664, 1670, 1671, 1684, 1758, 1767, 1784, 1809, 1868, 1869, 1876, 1, 20, 212, 213, 216, 218, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 27, 290, 291, 297, 298, 299, 30, 31, 32, 33, 34, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 36, 370, 371, 372, 373, 374, 375, 376, 377, 378, 380, 381, 382, 385, 386, 387, 389, 39, 40, 41, 420, 421, 423, 43, 44, 45, 46, 47, 48, 49, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 51, 52, 53, 54, 55, 56, 57, 58, 590, 591, 592, 593, 595, 597, 598, 599, 60, 61, 62, 63, 64, 65, 66, 670, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 685, 686, 687, 688, 689, 690, 691, 692, 7, 81, 82, 84, 850, 852, 853, 855, 856, 86, 870, 880, 886, 90, 91, 92, 93, 94, 95, 960, 961, 962, 963, 964, 965, 966, 967, 968, 970, 971, 972, 973, 974, 975, 976, 977, 98, 992, 993, 994, 995, 996, 998] MONTHS = ['J', 'F', 'M', 'A', 'Y', 'U', 'L', 'G', 'S', 'O', 'N', 'D'] ALPHA = 'abcdefghijklmnopqrstuvwxyz' def phonenumber_isint(number): ''' whether number is in international format ''' if re.match(r'^[+|(]', number): return True if re.match(r'^\d{1,4}\.\d+$', number): return True return False def phonenumber_indicator(number): ''' extract indicator from number or "" ''' for indic in ALL_COUNTRY_CODES: if number.startswith("%{}".format(indic)) \ or number.startswith("+{}".format(indic)): return str(indic) return "" def phonenumber_cleaned(number): ''' return (indicator, number) cleaned of space and other ''' # clean up if not isinstance(number, string_types): number = number.__str__() # cleanup markup clean_number = re.sub(r'[^\d\+]', '', number) if phonenumber_isint(clean_number): h, indicator, clean_number = \ clean_number.partition(phonenumber_indicator(clean_number)) return (indicator, clean_number) return (None, clean_number) def join_phonenumber(prefix, number, force_intl=True): if not number: return None if not prefix and force_intl: prefix = COUNTRY_PREFIX return "+{prefix}{number}".format(prefix=prefix, number=number) def phonenumber_repr(number, skip_indicator=str(COUNTRY_PREFIX)): ''' properly formated for visualization: (xxx) xx xx xx xx ''' def format(number): if len(number) % 2 == 0: span = 2 else: span = 3 # use NBSP return " ".join(["".join(number[i:i + span]) for i in range(0, len(number), span)]) indicator, clean_number = phonenumber_cleaned(number) # string-only identity goes into indicator if indicator is None and not clean_number: return number.strip() if indicator and indicator != skip_indicator: return "(%(ind)s) %(num)s" \ % {'ind': indicator, 'num': format(clean_number)} return format(clean_number) def normalized_phonenumber(number_text): if number_text is None or not number_text.strip(): return None return join_phonenumber(*phonenumber_cleaned(number_text)) def 
operator_from_malinumber(number, default=settings.FOREIGN): ''' ORANGE or MALITEL based on the number prefix ''' indicator, clean_number = phonenumber_cleaned( normalized_phonenumber(number)) if indicator is not None and indicator != str(COUNTRY_PREFIX): return default for operator, opt in settings.OPERATORS.items(): for prefix in opt[1]: if clean_number.startswith(str(prefix)): return operator return default def send_sms(to, text): return SMSMessage.objects.create( direction=SMSMessage.OUTGOING, identity=to, event_on=timezone.now(), text=text) def fake_message(to, text): message = send_sms(to, text) message.handled = True message.save() return message def to_ascii(text): return unicodedata.normalize('NFKD', unicode(text)) \ .encode('ASCII', 'ignore').strip() def date_to_ident(adate): year, month, day = adate.timetuple()[0:3] hyear = text_type(year)[-1] if day > 16: hmonth = ALPHA[month * 2] hday = hex(day // 2)[2:] else: hmonth = ALPHA[month] hday = hex(day)[2:] return "{y}{m}{d}".format(m=hmonth, d=hday, y=hyear) def ident_to_date(ident): hyear, hmonth, hday = ident[0], ident[1], ident[2:] year = int('201{}'.format(hyear)) day = int(hday, 16) month = ALPHA.index(hmonth) if month > 12: month //= 2 day *= 2 return datetime.date(year, month, day) def dispatch_sms(text, roles, root): sent_messages = [] for identity in root.ancestors_contacts(roles, identies_only=True): sent_messages.append(send_sms(identity, text)) return sent_messages<|fim▁hole|> return ("{date} à {time}" .format(date=adatetime.strftime("%A %-d"), time=adatetime.strftime("%Hh%M")).lower()) def exec_cmd(command): process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE) process.wait() return process.returncode<|fim▁end|>
def datetime_repr(adatetime):
<|file_name|>ParsedSubdomainMeshGenerator.C<|end_file_name|><|fim▁begin|>//* This file is part of the MOOSE framework //* https://www.mooseframework.org //* //* All rights reserved, see COPYRIGHT for full restrictions //* https://github.com/idaholab/moose/blob/master/COPYRIGHT //* //* Licensed under LGPL 2.1, please see LICENSE for details //* https://www.gnu.org/licenses/lgpl-2.1.html #include "ParsedSubdomainMeshGenerator.h" #include "Conversion.h" #include "MooseMeshUtils.h" #include "CastUniquePointer.h" #include "libmesh/fparser_ad.hh" #include "libmesh/elem.h" registerMooseObject("MooseApp", ParsedSubdomainMeshGenerator); defineLegacyParams(ParsedSubdomainMeshGenerator); InputParameters ParsedSubdomainMeshGenerator::validParams() { InputParameters params = MeshGenerator::validParams(); params += FunctionParserUtils<false>::validParams(); params.addRequiredParam<MeshGeneratorName>("input", "The mesh we want to modify"); params.addRequiredParam<std::string>("combinatorial_geometry", "Function expression encoding a combinatorial geometry"); params.addRequiredParam<subdomain_id_type>("block_id", "Subdomain id to set for inside of the combinatorial"); params.addParam<SubdomainName>("block_name", "Subdomain name to set for inside of the combinatorial"); params.addParam<std::vector<subdomain_id_type>>( "excluded_subdomain_ids", "A set of subdomain ids that will not changed even if " "they are inside/outside the combinatorial geometry"); params.addParam<std::vector<std::string>>( "constant_names", "Vector of constants used in the parsed function (use this for kB etc.)"); params.addParam<std::vector<std::string>>( "constant_expressions", "Vector of values for the constants in constant_names (can be an FParser expression)"); params.addClassDescription( "Uses a parsed expression (`combinatorial_geometry`) to determine if an " "element (via its centroid) is inside the region defined by the expression and " "assigns a new block ID."); return params; } ParsedSubdomainMeshGenerator::ParsedSubdomainMeshGenerator(const InputParameters & parameters) : MeshGenerator(parameters), FunctionParserUtils<false>(parameters), _input(getMesh("input")), _function(parameters.get<std::string>("combinatorial_geometry")), _block_id(parameters.get<SubdomainID>("block_id")), _excluded_ids(parameters.get<std::vector<SubdomainID>>("excluded_subdomain_ids")) { // base function object _func_F = std::make_shared<SymFunction>(); // set FParser internal feature flags setParserFeatureFlags(_func_F); // add the constant expressions addFParserConstants(_func_F, getParam<std::vector<std::string>>("constant_names"), getParam<std::vector<std::string>>("constant_expressions")); // parse function if (_func_F->Parse(_function, "x,y,z") >= 0) mooseError("Invalid function\n", _function, "\nin ParsedSubdomainMeshModifier ", name(), ".\n", _func_F->ErrorMsg()); _func_params.resize(3); } std::unique_ptr<MeshBase> ParsedSubdomainMeshGenerator::generate() { std::unique_ptr<MeshBase> mesh = std::move(_input); // Loop over the elements for (const auto & elem : mesh->active_element_ptr_range()) { _func_params[0] = elem->centroid()(0); _func_params[1] = elem->centroid()(1); _func_params[2] = elem->centroid()(2); bool contains = evaluate(_func_F); if (contains && std::find(_excluded_ids.begin(), _excluded_ids.end(), elem->subdomain_id()) == _excluded_ids.end())<|fim▁hole|> } // Assign block name, if provided if (isParamValid("block_name")) mesh->subdomain_name(_block_id) = getParam<SubdomainName>("block_name"); return 
dynamic_pointer_cast<MeshBase>(mesh); }<|fim▁end|>
elem->subdomain_id() = _block_id;
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>#[path = "../mod.rs"] mod testsuite; pub use testsuite::*;<|fim▁end|>
// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0. mod test_cdc;
<|file_name|>test_templatetags.py<|end_file_name|><|fim▁begin|>from django.test import TestCase from common.templatetags.verbose_name import verbose_name from users.models import SystersUser class TemplateTagsTestCase(TestCase):<|fim▁hole|> self.assertEqual(verbose_name(SystersUser, "homepage_url"), "Homepage")<|fim▁end|>
def test_verbose_names(self): """Test verbose_name template tag"""
<|file_name|>OrderByBuilderImpl.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software<|fim▁hole|> * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package org.apache.polygene.library.sql.generator.implementation.grammar.builders.query; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; import org.apache.polygene.library.sql.generator.grammar.builders.query.OrderByBuilder; import org.apache.polygene.library.sql.generator.grammar.query.OrderByClause; import org.apache.polygene.library.sql.generator.grammar.query.SortSpecification; import org.apache.polygene.library.sql.generator.implementation.grammar.common.SQLBuilderBase; import org.apache.polygene.library.sql.generator.implementation.grammar.query.OrderByClauseImpl; import org.apache.polygene.library.sql.generator.implementation.transformation.spi.SQLProcessorAggregator; /** * @author Stanislav Muhametsin */ public class OrderByBuilderImpl extends SQLBuilderBase implements OrderByBuilder { private final List<SortSpecification> _sortSpecs; public OrderByBuilderImpl( SQLProcessorAggregator processor ) { super( processor ); this._sortSpecs = new ArrayList<SortSpecification>(); } public OrderByBuilder addSortSpecs( SortSpecification... specs ) { for( SortSpecification spec : specs ) { Objects.requireNonNull( spec, "specification" ); } this._sortSpecs.addAll( Arrays.asList( specs ) ); return this; } public List<SortSpecification> getSortSpecs() { return Collections.unmodifiableList( this._sortSpecs ); } public OrderByClause createExpression() { return new OrderByClauseImpl( this.getProcessor(), this._sortSpecs ); } }<|fim▁end|>
<|file_name|>ArmorHelper.java<|end_file_name|><|fim▁begin|>package mcmod.nxs.animalwarriors.lib; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.item.ItemArmor; public class ArmorHelper extends ItemArmor { public ArmorHelper(ArmorMaterial material, int type, int layer)<|fim▁hole|> super(material, type, layer); } public ItemArmor setNameAndTab(String name, CreativeTabs tab) { this.setTextureName(ResourcePathHelper.getResourcesPath() + name); this.setUnlocalizedName(name); this.setCreativeTab(tab); return this; } }<|fim▁end|>
{
<|file_name|>fullscreen.rs<|end_file_name|><|fim▁begin|>#[cfg(target_os = "android")] #[macro_use] extern crate android_glue; extern crate glutin; use std::io; mod support; #[cfg(target_os = "android")] android_start!(main); #[cfg(not(feature = "window"))] fn main() { println!("This example requires glutin to be compiled with the `window` feature"); } #[cfg(feature = "window")] fn main() { // enumerating monitors let monitor = { for (num, monitor) in glutin::get_available_monitors().enumerate() { println!("Monitor #{}: {:?}", num, monitor.get_name()); } print!("Please write the number of the monitor to use: "); let mut num = String::new(); io::stdin().read_line(&mut num).unwrap(); let num = num.trim().parse().ok().expect("Please enter a number"); let monitor = glutin::get_available_monitors().nth(num).expect("Please enter a valid ID"); println!("Using {:?}", monitor.get_name()); monitor }; let window = glutin::WindowBuilder::new() .with_title("Hello world!".to_string()) .with_fullscreen(monitor) .build()<|fim▁hole|> let context = support::load(&window); while !window.is_closed() { context.draw_frame((0.0, 1.0, 0.0, 1.0)); window.swap_buffers(); println!("{:?}", window.wait_events().next()); } }<|fim▁end|>
.unwrap(); unsafe { window.make_current() };
<|file_name|>i2c_device_test.go<|end_file_name|><|fim▁begin|>package sysfs import ( "os" "testing" "gobot.io/x/gobot/gobottest" ) func TestNewI2cDevice(t *testing.T) { fs := NewMockFilesystem([]string{}) SetFilesystem(fs) i, err := NewI2cDevice(os.DevNull) gobottest.Refute(t, err, nil) fs = NewMockFilesystem([]string{ "/dev/i2c-1", }) SetFilesystem(fs) i, err = NewI2cDevice("/dev/i2c-1") gobottest.Refute(t, err, nil) SetSyscall(&MockSyscall{}) i, err = NewI2cDevice("/dev/i2c-1") var _ I2cDevice = i gobottest.Assert(t, err, nil)<|fim▁hole|> gobottest.Assert(t, i.SetAddress(0xff), nil) buf := []byte{0x01, 0x02, 0x03} n, err := i.Write(buf) gobottest.Assert(t, n, len(buf)) gobottest.Assert(t, err, nil) buf = make([]byte, 4) n, err = i.Read(buf) gobottest.Assert(t, n, 3) gobottest.Assert(t, err, nil) }<|fim▁end|>
<|file_name|>neuron.rs<|end_file_name|><|fim▁begin|>/* This file is part of Mulp. Mulp is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Mulp is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Mulp. If not, see <http://www.gnu.org/licenses/>. */ //use std::num::Float; use std::fmt; use std::rand::{task_rng, Rng, TaskRng}; use logistic_functions; // .abs(), .exp() <|fim▁hole|>// TODO: // // Create the ability to use // - radial basis neuron // - sigma-pi neuron // // delta_weights are used for the retropropagation algorithm // // uniquement pour fonctions logisitques // (plus logique) // // TODO : // - permettre cependant l'utilisation d'autres fonctions ... /// An artificial neuron is represented here pub struct Neuron { /// general /// The number of weights of the neuron (input dimension) nb_input: uint, // ACTUAL DIM = NB INPUT + 1 (BIAIS) /// The weights of the neuron w: Vec<f64>, // weights of f64 /// The net value (ponderated sum) net: f64, //ponderated sum // propagation a: f64, // activation // backpropagation ap: f64, // diff-activation. /!\ Do not use in the output layer momentum: Vec<f64>, // add inertia error: f64, ponderated_error: Vec<f64>, // computed error is passed cross each weights, one for each neuron dimension // to not confound with ponderated_errors from the following neurons layer. //delta_w: Vec<f64> // compute with activation an // CHECK IF WE ACTUALLY HAVE // delta_w <=> ponderated error } impl Neuron { // static method /// Returns a neuron with the number of dimension given /// /// # Arguments /// /// * `nb_input` - An unsigned int which represent the number of dimension of the input vector /// pub fn new(nb_input: uint) -> Neuron { // Propagation let mut w: Vec<f64> = Vec::with_capacity(nb_input); for _ in range(0u, nb_input) { w.push(0.0); } // all 0f64. let net: f64 = 0f64; let a: f64 = 0f64; // Back propagation let ap: f64 = 0f64; let mut momentum: Vec<f64> = Vec::with_capacity(nb_input); for _ in range(0u, nb_input) { momentum.push(0.0); } let error: f64 = 0f64; // do not initialize because, needs a propagation de toda manera. 
let mut ponderated_error: Vec<f64> = Vec::with_capacity(nb_input); for _ in range(0u, nb_input) { ponderated_error.push(0.0); } return Neuron { nb_input: nb_input, w: w, net: net, a: a, ap: ap, momentum: momentum, error: error, ponderated_error: ponderated_error, }; } // Get parameters pub fn get_dim(&mut self) -> uint { return self.nb_input; } pub fn get_weights(&mut self) -> Vec<f64>{ return self.w.clone(); } pub fn get_net(&mut self) -> f64 { return self.net; } pub fn get_activation(&mut self) -> f64 { return self.a; } pub fn get_ponderated_error(&mut self) -> Vec<f64> { return self.ponderated_error.clone(); } // // INITIALISATION // /// Load existing weights and erase the current ones, /// into the neuron /// /// # Arguments /// /// * `w` - New weights /// pub fn load_weights(&mut self, w: &[f64]){ for j in range (0i, self.nb_input as int) { let i = j as uint; self.w[i] = w[i]; } } /// Initialize weights pub fn initialize_weights(&mut self, task_rng: &mut TaskRng, born: f64) { let mut it = self.w.iter_mut(); loop { match it.next() { Some(w) => { *w = task_rng.gen_range(-born, born); } None => { break } } } } // PROPAGATION // // Should be differentiable // class C2 // input vector is the output of the precedent layer pub fn compute_net(&mut self, input: &[f64]) { self.net = dot_product(input, self.w.as_slice()); } fn phi(&self, x: f64) -> f64 { return logistic_functions::sigmoid(x); } fn phip(&self, x:f64) -> f64 { return logistic_functions::sigmoidp(x); } fn compute_a(&mut self) { self.a = self.phi(self.net); } /// Propagates the input value in the neuron : /// it computes the net value summing the weighted inputs, and then computing the activation. /// /// # Arguments /// /// * `input` - The input vector /// pub fn propagate(&mut self, input: &[f64]) { // checked before if input of the neuron size. self.compute_net(input); self.compute_a(); } // // BACKPROPAGATION // /// Differentiated activation is necessary for retro backpropagation /// /// WARNING : works only with logistic functions fn compute_ap(&mut self) { // because the activation function is a logistic function self.ap = self.phip(self.net); } /// Computes error with the summed ponderated vector error of the following layer. /// A neuron corresponds to a specifical dimension of each neurons of the following layer. /// Thus, there are as many ponderated errors () as those present in the following layer /// /// # Arguments /// /// * `ponderated_error` - The summed ponderated errors /// fn compute_error(&mut self, ponderated_error: f64){ // DONE BY MLP // let sum: f64 = ponderated_errors.iter().fold(0f64, |a, &b| a + b); // self.error = (self.ap)*ponderated_error; // ponderate error for each dim / weight // different from ponderated_errors !! for i in range (0u, self.nb_input) { self.ponderated_error[i] = self.w[i]*self.error; } } /// Back propagation using the gradient descent algorithm. /// Only for hidden layers. /// /// # Arguments /// /// * `ponderated_errors` - It is the ponderated error from the next neurons layer. /// It is the ponderated error of each neurons of the following layer corresponding of the current neuron-dimension. /// /// Do not use with the output layer. /// In the output layer we do not care the activation function derivability /// output_error = target - output. 
/// Use load_error() and load_activiation () /// pub fn back_propagate(&mut self, ponderated_error: f64){ self.compute_ap(); self.compute_error(ponderated_error); } /// Actualize neuron weights with the computed error and the corresponding input. /// (check if delta_w corresponds to ponderated_error). /// /// # Arguments /// /// * `input` - The input data corresponding of the back propagation. /// * `nu` - The learning rate [0,1], typical 0.3. /// * `mu` - The momentum [0,1], typical 0.9. pub fn actualize_weights(&mut self, input: &[f64], nu: f64, mu: f64) { for i in range (0u, self.nb_input) { self.w[i] += nu*self.error*input[i] + mu*self.momentum[i]; self.momentum[i] = nu*self.error*input[i] + mu*self.momentum[i]; // or // self.w[i] += nu*self.error*input[i] + mu*self.momentum[i]; // self.momentum[i] = nu*self.error*input[i]; } } /// Substitue activation with another. /// It must be only use for softmax computation ! /// /// It gives rights to the neurons layer to modify its neurons activation. /// P.S: it is possible to modify and to make a kind of interface (?) or virtual neuron /// specifical of the output features. pub fn load_activation(&mut self, a: f64) { self.a = a; } /// Load (output - target) precedly computed /// only for the output layer. /// Load error and compute ponderated errors. /// pub fn load_error(&mut self, error: f64) { self.error = error; // and then back propagate accross weights // ponderate error for each dim / weight // different from ponderated_errors !! // // P.S: implant zip functionality ? for i in range (0u, self.nb_input) { self.ponderated_error[i] = self.w[i]*self.error; } } } impl fmt::Show for Neuron { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { return write!(f,"{}",self.w); } } fn dot_product(v0: &[f64], v1: &[f64]) -> f64 { let mut sum = 0f64; let mut it = v0.iter().zip(v1.iter()); loop { match it.next() { Some(z) => { sum += (*z.val0()) * (*z.val1()); } None => { break } } } return sum; } #[test] fn dot_product_test() { let v0 = vec![1.55, 0.0, 42.0, -5.8744, 7.0]; let v1 = vec![-3.9996, 5556.0, -2.0, -5.902, 98.0]; let p = dot_product(v0.as_slice(),v1.as_slice()); let actual_p = 630.47133; let epsilon = 0.00001; assert!(float_extension::is_equal(p,actual_p,epsilon)); } #[test] fn neuron_test () { let mut n: Neuron = Neuron::new(4u); // check if all is well initialized let mut v1: Vec<f64> = Vec::new(); let mut w: Vec<f64> = Vec::new(); // input v1.push(1.0); // biais (v1_0) v1.push(1.6); // v1_1 v1.push(2560.9); // v1_2 v1.push(-0.001); // v1_3 // weights w.push(0.6); // w0 (biais) w.push(-1.2); // w1 w.push(0.035); // w2 w.push(65.64); // w3 n.load_weights(w.as_slice()); // ready to propagate ? assert!(n.get_dim() == 4u); //println!{"weights:\n{}",w}; n.propagate(v1.as_slice()); //assert!(); // ponderated error from the correspondants weigts from the neurons of layer. 
// summed by mlp class let ponderated_error = 0.6; n.back_propagate(ponderated_error); // compute ap: phip(net) = // computer error // println!("{}", n.ap); // println!("{}", n.error); // println!{"n:\n{}",n}; // because actualisation depends of input let nu = 0.7; let mu = 0.9; n.actualize_weights(v1.as_slice(),nu,mu); let new_w = n.get_weights(); //println!("weights:\n{}", new_w); // println!("{} {}"); } /* fn neuron_test_backpropagation () { let mut n: Neuron = Neuron::new(2u); // check if all is well initialized let mut v1: Vec<f64> = Vec::new(); let mut w: Vec<f64> = Vec::new(); // input v1.push(1.0); // biais (v1_0) v1.push(-5.0); // v1_3 // target = 24.0 // weights w.push(0.6); // w0 (biais) w.push(1.0); // w1 n.load_weights(w.as_slice()); n.propagate(v1.as_slice()); n.back_propagate(2.0); // compute ap // compute ponderated errors } */<|fim▁end|>
// Neuron // McCulloch & Pitts's Neuron //
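The Rust neuron sample above updates each weight with a momentum term (w[i] += nu*error*input[i] + mu*momentum[i], then stores that step as the new momentum). The following is only a minimal Python sketch of the same delta-rule-with-momentum update for illustration; the names nu, mu, error and momentum mirror the sample, and nothing here is taken from the original crate.

def actualize_weights(w, momentum, inputs, error, nu=0.3, mu=0.9):
    # Delta rule with momentum: the new step is learning_rate * error * input
    # plus a fraction of the previous step, mirroring the Rust sample above.
    for i in range(len(w)):
        step = nu * error * inputs[i] + mu * momentum[i]
        w[i] += step
        momentum[i] = step
    return w, momentum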
<|file_name|>editdate.py<|end_file_name|><|fim▁begin|># # Gramps - a GTK+/GNOME based genealogy program # # Copyright (C) 2002-2006 Donald N. Allingham # Copyright (C) 2009 Douglas S. Blank # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # $Id$ """ Date editing module for GRAMPS. The EditDate provides visual feedback to the user via a pixamp to indicate if the associated GtkEntry box contains a valid date. Green means complete and regular date. Yellow means a valid, but not a regular date. Red means that the date is not valid, and will be viewed as a text string instead of a date. The DateEditor provides a dialog in which the date can be unambiguously built using UI controls such as menus and spin buttons. """ #------------------------------------------------------------------------- # # Python modules # #------------------------------------------------------------------------- #------------------------------------------------------------------------- # # set up logging # #------------------------------------------------------------------------- import logging __LOG = logging.getLogger(".EditDate") #------------------------------------------------------------------------- # # GNOME modules # #------------------------------------------------------------------------- from gi.repository import Gtk #------------------------------------------------------------------------- # # gramps modules # #------------------------------------------------------------------------- from gramps.gen.ggettext import sgettext as _ from gramps.gen.lib.date import Date from gramps.gen.datehandler import displayer from gramps.gen.const import URL_MANUAL_PAGE from ..display import display_help from ..managedwindow import ManagedWindow from ..glade import Glade #------------------------------------------------------------------------- # # Constants # #------------------------------------------------------------------------- MOD_TEXT = ( (Date.MOD_NONE , _('Regular')), (Date.MOD_BEFORE , _('Before')), (Date.MOD_AFTER , _('After')), (Date.MOD_ABOUT , _('About')), (Date.MOD_RANGE , _('Range')), (Date.MOD_SPAN , _('Span')), (Date.MOD_TEXTONLY , _('Text only')) ) QUAL_TEXT = ( (Date.QUAL_NONE, _('Regular')), (Date.QUAL_ESTIMATED, _('Estimated')), (Date.QUAL_CALCULATED, _('Calculated')) ) CAL_TO_MONTHS_NAMES = { Date.CAL_GREGORIAN : displayer.short_months, Date.CAL_JULIAN : displayer.short_months, Date.CAL_HEBREW : displayer.hebrew, Date.CAL_FRENCH : displayer.french, Date.CAL_PERSIAN : displayer.persian, Date.CAL_ISLAMIC : displayer.islamic, Date.CAL_SWEDISH : displayer.swedish } WIKI_HELP_PAGE = '%s_-_Entering_and_Editing_Data:_Detailed_-_part_1' % URL_MANUAL_PAGE WIKI_HELP_SEC = _('manual|Editing_Dates') #------------------------------------------------------------------------- # # EditDate # #------------------------------------------------------------------------- class 
EditDate(ManagedWindow): """<|fim▁hole|> Dialog allowing to build the date precisely, to correct possible limitations of parsing and/or underlying structure of Date. """ def __init__(self, date, uistate, track): """ Initiate and display the dialog. """ ManagedWindow.__init__(self, uistate, track, self) # Create self.date as a copy of the given Date object. self.date = Date(date) self.top = Glade() self.set_window( self.top.toplevel, self.top.get_object('title'), _('Date selection')) self.calendar_box = self.top.get_object('calendar_box') for name in Date.ui_calendar_names: self.calendar_box.get_model().append([name]) self.calendar_box.set_active(self.date.get_calendar()) self.calendar_box.connect('changed', self.switch_calendar) self.quality_box = self.top.get_object('quality_box') for item_number in range(len(QUAL_TEXT)): self.quality_box.append_text(QUAL_TEXT[item_number][1]) if self.date.get_quality() == QUAL_TEXT[item_number][0]: self.quality_box.set_active(item_number) self.type_box = self.top.get_object('type_box') for item_number in range(len(MOD_TEXT)): self.type_box.append_text(MOD_TEXT[item_number][1]) if self.date.get_modifier() == MOD_TEXT[item_number][0]: self.type_box.set_active(item_number) self.type_box.connect('changed', self.switch_type) self.start_month_box = self.top.get_object('start_month_box') self.stop_month_box = self.top.get_object('stop_month_box') month_names = CAL_TO_MONTHS_NAMES[self.date.get_calendar()] for name in month_names: self.start_month_box.append_text(name) self.stop_month_box.append_text(name) self.start_month_box.set_active(self.date.get_month()) self.stop_month_box.set_active(self.date.get_stop_month()) self.start_day = self.top.get_object('start_day') self.start_day.set_value(self.date.get_day()) self.start_year = self.top.get_object('start_year') self.start_year.set_value(self.date.get_year()) self.stop_day = self.top.get_object('stop_day') self.stop_day.set_value(self.date.get_stop_day()) self.stop_year = self.top.get_object('stop_year') self.stop_year.set_value(self.date.get_stop_year()) self.dual_dated = self.top.get_object('dualdated') self.new_year = self.top.get_object('newyear') self.new_year.set_text(self.date.newyear_to_str()) # Disable second date controls if not compound date if not self.date.is_compound(): self.stop_day.set_sensitive(0) self.stop_month_box.set_sensitive(0) self.stop_year.set_sensitive(0) # Disable the rest of controls if a text-only date if self.date.get_modifier() == Date.MOD_TEXTONLY: self.start_day.set_sensitive(0) self.start_month_box.set_sensitive(0) self.start_year.set_sensitive(0) self.calendar_box.set_sensitive(0) self.quality_box.set_sensitive(0) self.dual_dated.set_sensitive(0) self.new_year.set_sensitive(0) self.text_entry = self.top.get_object('date_text_entry') self.text_entry.set_text(self.date.get_text()) if self.date.get_slash(): self.dual_dated.set_active(1) self.calendar_box.set_sensitive(0) self.calendar_box.set_active(Date.CAL_JULIAN) self.dual_dated.connect('toggled', self.switch_dual_dated) # The dialog is modal -- since dates don't have names, we don't # want to have several open dialogs, since then the user will # loose track of which is which. Much like opening files. 
self.return_date = None self.show() while True: response = self.window.run() if response == Gtk.ResponseType.HELP: display_help(webpage=WIKI_HELP_PAGE, section=WIKI_HELP_SEC) elif response == Gtk.ResponseType.DELETE_EVENT: break else: if response == Gtk.ResponseType.OK: (the_quality, the_modifier, the_calendar, the_value, the_text, the_newyear) = self.build_date_from_ui() self.return_date = Date(self.date) self.return_date.set( quality=the_quality, modifier=the_modifier, calendar=the_calendar, value=the_value, text=the_text, newyear=the_newyear) self.close() break def build_menu_names(self, obj): """ Define the menu entry for the ManagedWindows """ return (_("Date selection"), None) def build_date_from_ui(self): """ Collect information from the UI controls and return 5-tuple of (quality,modifier,calendar,value,text) """ # It is important to not set date based on these controls. # For example, changing the caledar makes the date inconsistent # until the callback of the calendar menu is finished. # We need to be able to use this function from that callback, # so here we just report on the state of all widgets, without # actually modifying the date yet. modifier = MOD_TEXT[self.type_box.get_active()][0] text = self.text_entry.get_text() if modifier == Date.MOD_TEXTONLY: return (Date.QUAL_NONE, Date.MOD_TEXTONLY, Date.CAL_GREGORIAN, Date.EMPTY,text, Date.NEWYEAR_JAN1) quality = QUAL_TEXT[self.quality_box.get_active()][0] if modifier in (Date.MOD_RANGE, Date.MOD_SPAN): value = ( self.start_day.get_value_as_int(), self.start_month_box.get_active(), self.start_year.get_value_as_int(), self.dual_dated.get_active(), self.stop_day.get_value_as_int(), self.stop_month_box.get_active(), self.stop_year.get_value_as_int(), self.dual_dated.get_active()) else: value = ( self.start_day.get_value_as_int(), self.start_month_box.get_active(), self.start_year.get_value_as_int(), self.dual_dated.get_active()) calendar = self.calendar_box.get_active() newyear = Date.newyear_to_code(self.new_year.get_text()) return (quality, modifier, calendar, value, text, newyear) def switch_type(self, obj): """ Disable/enable various date controls depending on the date type selected via the menu. """ the_modifier = MOD_TEXT[self.type_box.get_active()][0] # Disable/enable second date controls based on whether # the type allows compound dates if the_modifier in (Date.MOD_RANGE, Date.MOD_SPAN): stop_date_sensitivity = 1 else: stop_date_sensitivity = 0 self.stop_day.set_sensitive(stop_date_sensitivity) self.stop_month_box.set_sensitive(stop_date_sensitivity) self.stop_year.set_sensitive(stop_date_sensitivity) # Disable/enable the rest of the controls if the type is text-only. date_sensitivity = not the_modifier == Date.MOD_TEXTONLY self.start_day.set_sensitive(date_sensitivity) self.start_month_box.set_sensitive(date_sensitivity) self.start_year.set_sensitive(date_sensitivity) self.calendar_box.set_sensitive(date_sensitivity) self.quality_box.set_sensitive(date_sensitivity) self.dual_dated.set_sensitive(date_sensitivity) self.new_year.set_sensitive(date_sensitivity) def switch_dual_dated(self, obj): """ Changed whether this is a dual dated year, or not. Dual dated years are represented in the Julian calendar so that the day/months don't changed in the Text representation. 
""" if self.dual_dated.get_active(): self.calendar_box.set_active(Date.CAL_JULIAN) self.calendar_box.set_sensitive(0) else: self.calendar_box.set_sensitive(1) def switch_calendar(self, obj): """ Change month names and convert the date based on the calendar selected via the menu. """ old_cal = self.date.get_calendar() new_cal = self.calendar_box.get_active() (the_quality, the_modifier, the_calendar, the_value, the_text, the_newyear) = self.build_date_from_ui() self.date.set( quality=the_quality, modifier=the_modifier, calendar=old_cal, value=the_value, text=the_text, newyear=the_newyear) if not self.date.is_empty(): self.date.convert_calendar(new_cal) self.start_month_box.get_model().clear() self.stop_month_box.get_model().clear() month_names = CAL_TO_MONTHS_NAMES[new_cal] for name in month_names: self.start_month_box.append_text(name) self.stop_month_box.append_text(name) self.start_day.set_value(self.date.get_day()) self.start_month_box.set_active(self.date.get_month()) self.start_year.set_value(self.date.get_year()) self.stop_day.set_value(self.date.get_stop_day()) self.stop_month_box.set_active(self.date.get_stop_month()) self.stop_year.set_value(self.date.get_stop_year())<|fim▁end|>
<|file_name|>xml-hint.js<|end_file_name|><|fim▁begin|>(function () { "use strict"; <|fim▁hole|> var tags = options && options.schemaInfo; var quote = (options && options.quoteChar) || '"'; if (!tags) return;// logO(tags, 'tags');//gets tags from schema (html in this case) var cur = cm.getCursor(), token = cm.getTokenAt(cur); var inner = CodeMirror.innerMode(cm.getMode(), token.state); if (inner.mode.name != "xml") return;//logO(inner.mode.name,'inner.mode.name'); //(still xml when in attribute quotes) var result = [], replaceToken = false, prefix; var isTag = token.string.charAt(0) == "<"; //Gather custom completions from my plugin //MY CUSTOM CODE var word = token.string;//logO(word, 'word'); if (CodeMirrorCustomCompletions && CodeMirrorCustomCompletions.length > 0) { for (var n = 0; n < CodeMirrorCustomCompletions.length; n++) { var name = CodeMirrorCustomCompletions[n].name; if (name.toLowerCase().indexOf(word) !== -1) { result.push(name); replaceToken = true; //IMPORTANT- I added this for custom completions to work, its possible that it may screw something up! } } }//end of my code //logO(result, 'result'); if (!inner.state.tagName || isTag) { // Tag completion if (isTag) { prefix = token.string.slice(1); replaceToken = true; } var cx = inner.state.context, curTag = cx && tags[cx.tagName]; var childList = cx ? curTag && curTag.children : tags["!top"]; if (childList) { for (var i = 0; i < childList.length; ++i) if (!prefix || childList[i].lastIndexOf(prefix, 0) == 0) result.push("<" + childList[i]); } else { for (var name in tags) if (tags.hasOwnProperty(name) && name != "!top" && (!prefix || name.lastIndexOf(prefix, 0) == 0)) result.push("<" + name); } if (cx && (!prefix || ("/" + cx.tagName).lastIndexOf(prefix, 0) == 0)) result.push("</" + cx.tagName + ">"); } else { // Attribute completion var curTag = tags[inner.state.tagName], attrs = curTag && curTag.attrs; if (!attrs) return; if (token.type == "string" || token.string == "=") { // A value var before = cm.getRange(Pos(cur.line, Math.max(0, cur.ch - 60)), Pos(cur.line, token.type == "string" ? token.start : token.end)); var atName = before.match(/([^\s\u00a0=<>\"\']+)=$/), atValues; if (!atName || !attrs.hasOwnProperty(atName[1]) || !(atValues = attrs[atName[1]])) return; if (typeof atValues == 'function') atValues = atValues.call(this, cm); // Functions can be used to supply values for autocomplete widget if (token.type == "string") { prefix = token.string; if (/['"]/.test(token.string.charAt(0))) { quote = token.string.charAt(0); prefix = token.string.slice(1); } replaceToken = true; } for (var i = 0; i < atValues.length; ++i) if (!prefix || atValues[i].lastIndexOf(prefix, 0) == 0) result.push(quote + atValues[i] + quote); } else { // An attribute name if (token.type == "attribute") { //logO(token, 'token xml-hint attribute'); prefix = token.string; replaceToken = true; } for (var attr in attrs) if (attrs.hasOwnProperty(attr) && (!prefix || attr.lastIndexOf(prefix, 0) == 0)) result.push(attr); } } return { list: result, from: replaceToken ? Pos(cur.line, token.start) : cur, to: replaceToken ? Pos(cur.line, token.end) : cur }; } CodeMirror.xmlHint = getHints; // deprecated CodeMirror.registerHelper("hint", "xml", getHints); })();<|fim▁end|>
var Pos = CodeMirror.Pos; function getHints(cm, options) {
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os.path import re import sys try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit() def read(filename): return open(os.path.join(os.path.dirname(__file__), filename)).read() description = 'Google Spreadsheets Python API' long_description = """ {index} License ------- MIT Download ======== """ long_description = long_description.lstrip("\n").format(index=read('docs/index.txt')) version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', read('gspread/__init__.py'), re.MULTILINE).group(1) setup( name='gspread', packages=['gspread'], description=description, long_description=long_description, version=version, author='Anton Burnashev', author_email='[email protected]', url='https://github.com/burnash/gspread', keywords=['spreadsheets', 'google-spreadsheets'], install_requires=['requests>=2.2.1'], classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop",<|fim▁hole|>
"Intended Audience :: Science/Research", "Topic :: Office/Business :: Financial :: Spreadsheet",
<|file_name|>registering.rs<|end_file_name|><|fim▁begin|>#![cfg(all(feature = "os-poll", feature = "net"))] use std::io::{self, Write}; use std::thread::sleep; use std::time::Duration; use log::{debug, info, trace}; #[cfg(debug_assertions)] use mio::net::UdpSocket; use mio::net::{TcpListener, TcpStream}; use mio::{Events, Interest, Poll, Registry, Token}; mod util; #[cfg(debug_assertions)] use util::assert_error; use util::{any_local_address, init}; const SERVER: Token = Token(0); const CLIENT: Token = Token(1); struct TestHandler { server: TcpListener, client: TcpStream, state: usize, } impl TestHandler { fn new(srv: TcpListener, cli: TcpStream) -> TestHandler { TestHandler { server: srv, client: cli, state: 0, } } fn handle_read(&mut self, registry: &Registry, token: Token) { match token { SERVER => { trace!("handle_read; token=SERVER"); let mut sock = self.server.accept().unwrap().0; if let Err(err) = sock.write(b"foobar") { if err.kind() != io::ErrorKind::WouldBlock { panic!("unexpected error writing to connection: {}", err); } } } CLIENT => { trace!("handle_read; token=CLIENT"); assert!(self.state == 0, "unexpected state {}", self.state); self.state = 1; registry .reregister(&mut self.client, CLIENT, Interest::WRITABLE) .unwrap(); } _ => panic!("unexpected token"), } } fn handle_write(&mut self, registry: &Registry, token: Token) { debug!("handle_write; token={:?}; state={:?}", token, self.state); assert!(token == CLIENT, "unexpected token {:?}", token); assert!(self.state == 1, "unexpected state {}", self.state); self.state = 2; registry.deregister(&mut self.client).unwrap(); registry.deregister(&mut self.server).unwrap(); } } #[test] pub fn register_deregister() { init(); debug!("Starting TEST_REGISTER_DEREGISTER"); let mut poll = Poll::new().unwrap(); let mut events = Events::with_capacity(1024); let mut server = TcpListener::bind(any_local_address()).unwrap(); let addr = server.local_addr().unwrap(); info!("register server socket"); poll.registry() .register(&mut server, SERVER, Interest::READABLE) .unwrap(); let mut client = TcpStream::connect(addr).unwrap(); // Register client socket only as writable poll.registry() .register(&mut client, CLIENT, Interest::READABLE) .unwrap(); let mut handler = TestHandler::new(server, client); loop { poll.poll(&mut events, None).unwrap(); if let Some(event) = events.iter().next() { if event.is_readable() { handler.handle_read(poll.registry(), event.token()); } if event.is_writable() { handler.handle_write(poll.registry(), event.token()); break; } } } poll.poll(&mut events, Some(Duration::from_millis(100))) .unwrap();<|fim▁hole|>#[test] pub fn reregister_different_interest_without_poll() { init(); let mut events = Events::with_capacity(1024); let mut poll = Poll::new().unwrap(); // Create the listener let mut l = TcpListener::bind("127.0.0.1:0".parse().unwrap()).unwrap(); // Register the listener with `Poll` poll.registry() .register(&mut l, Token(0), Interest::READABLE) .unwrap(); let mut s1 = TcpStream::connect(l.local_addr().unwrap()).unwrap(); poll.registry() .register(&mut s1, Token(2), Interest::READABLE) .unwrap(); const TIMEOUT: Duration = Duration::from_millis(200); sleep(TIMEOUT); poll.registry() .reregister(&mut l, Token(0), Interest::WRITABLE) .unwrap(); poll.poll(&mut events, Some(TIMEOUT)).unwrap(); assert!(events.iter().next().is_none()); } #[test] #[cfg(debug_assertions)] // Check is only present when debug assertions are enabled. 
fn tcp_register_multiple_event_loops() { init(); let mut listener = TcpListener::bind(any_local_address()).unwrap(); let addr = listener.local_addr().unwrap(); let poll1 = Poll::new().unwrap(); poll1 .registry() .register( &mut listener, Token(0), Interest::READABLE | Interest::WRITABLE, ) .unwrap(); let poll2 = Poll::new().unwrap(); // Try registering the same socket with the initial one let res = poll2.registry().register( &mut listener, Token(0), Interest::READABLE | Interest::WRITABLE, ); assert_error(res, "I/O source already registered with a `Registry`"); // Try the stream let mut stream = TcpStream::connect(addr).unwrap(); poll1 .registry() .register( &mut stream, Token(1), Interest::READABLE | Interest::WRITABLE, ) .unwrap(); let res = poll2.registry().register( &mut stream, Token(1), Interest::READABLE | Interest::WRITABLE, ); assert_error(res, "I/O source already registered with a `Registry`"); } #[test] #[cfg(debug_assertions)] // Check is only present when debug assertions are enabled. fn udp_register_multiple_event_loops() { init(); let mut socket = UdpSocket::bind(any_local_address()).unwrap(); let poll1 = Poll::new().unwrap(); poll1 .registry() .register( &mut socket, Token(0), Interest::READABLE | Interest::WRITABLE, ) .unwrap(); let poll2 = Poll::new().unwrap(); // Try registering the same socket with the initial one let res = poll2.registry().register( &mut socket, Token(0), Interest::READABLE | Interest::WRITABLE, ); assert_error(res, "I/O source already registered with a `Registry`"); } #[test] fn registering_after_deregistering() { init(); let mut poll = Poll::new().unwrap(); let mut events = Events::with_capacity(8); let mut server = TcpListener::bind(any_local_address()).unwrap(); poll.registry() .register(&mut server, SERVER, Interest::READABLE) .unwrap(); poll.registry().deregister(&mut server).unwrap(); poll.registry() .register(&mut server, SERVER, Interest::READABLE) .unwrap(); poll.poll(&mut events, Some(Duration::from_millis(100))) .unwrap(); assert!(events.is_empty()); }<|fim▁end|>
assert!(events.iter().next().is_none()); }
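The mio tests above exercise register, reregister and deregister against a Poll registry. As a rough analogue only (not mio itself), Python's selectors module exposes the same three operations; this hedged sketch uses a throwaway local socket purely for illustration.

import selectors
import socket

sel = selectors.DefaultSelector()
server = socket.socket()
server.bind(("127.0.0.1", 0))
server.listen()
server.setblocking(False)

sel.register(server, selectors.EVENT_READ)    # roughly registry.register(..., READABLE)
sel.modify(server, selectors.EVENT_WRITE)     # roughly registry.reregister(..., WRITABLE)
sel.unregister(server)                        # roughly registry.deregister(...)
sel.register(server, selectors.EVENT_READ)    # registering after deregistering is allowed
events = sel.select(timeout=0.1)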
<|file_name|>pool.rs<|end_file_name|><|fim▁begin|>use std::{mem, fmt}; use std::sync::atomic::{AtomicUsize, Ordering}; use std::ops::{Index, IndexMut}; use std::marker::PhantomData; use std::collections::LinkedList; use std::ops::{Deref, DerefMut}; use std::ptr; pub mod tests; /// Arc is the only valid way to access an item in /// the pool. It is returned by alloc, and will automatically /// release/retain when dropped/cloned. It implements Deref/DerefMut, /// so all accesses can go through it. /// WARNING! Taking the address of the dereferenced value constitutes /// undefined behavior. So, given a: Arc<T>, &*a is not allowed pub struct Arc<T> { pool: *mut Pool<T>, index: usize, } /// Public functions impl <T> Arc<T> { /// If you want to manually manage the memory or /// use the wrapped reference outside of the Arc system /// the retain/release functions provide an escape hatch. /// Retain will increment the reference count<|fim▁hole|> /// If you want to manually manage the memory or /// use the wrapped reference outside of the Arc system /// the retain/release functions provide an escape hatch. /// Release will decrement the reference count pub unsafe fn release(&mut self) { self.get_pool().release(self.index); } } /// Internal functions impl <T> Arc<T> { /// It's somewhat confusing that Arc::new() /// does not take care of bumping the ref count. /// However, the atomic op for claiming a free slot /// needs to happen before the new() takes place fn new(index: usize, p: &Pool<T>) -> Arc<T> { Arc { pool: unsafe { mem::transmute(p) }, index: index, } } fn get_pool(&self) -> &mut Pool<T> { unsafe { &mut *self.pool } } fn ref_count(&self) -> usize { self.get_pool().header_for(self.index).ref_count.load(Ordering::Relaxed) } } impl <T> Drop for Arc<T> { fn drop(&mut self) { self.get_pool().release(self.index); } } impl <T> Clone for Arc<T> { fn clone(&self) -> Self { self.get_pool().retain(self.index); Arc { pool: self.pool, index: self.index, } } } impl<T> Deref for Arc<T> { type Target = T; fn deref<'b>(&'b self) -> &'b T { &self.get_pool()[self.index] } } impl<T> DerefMut for Arc<T> { fn deref_mut<'b>(&'b mut self) -> &'b mut T { &mut self.get_pool()[self.index] } } impl <T> fmt::Debug for Arc<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Arc{{ offset: {:?}, ref_count: {:?} }}", self.index, self.ref_count()) } } impl <T> PartialEq for Arc<T> { fn eq(&self, other: &Arc<T>) -> bool { if self.index != other.index { false } else { unsafe { self.pool as *const _ == other.pool as *const _ } } } } /// A pool represents a fixed number of ref-counted objects. /// The pool treats all given space as an unallocated /// pool of objects. Each object is prefixed with a header. 
/// The header is formatted as follows: /// * V1 /// - [0..2] ref_count: u16 /// pub struct Pool<T> { item_type: PhantomData<T>, buffer: *mut u8, buffer_size: usize, capacity: usize, tail: AtomicUsize, // One past the end index // Cached values slot_size: usize, header_size: usize, free_list: LinkedList<usize>, } struct SlotHeader { ref_count: AtomicUsize, } /// Public interface impl <T> Pool<T> { pub fn new(mem: &mut [u8]) -> Pool<T> { let ptr: *mut u8 = mem.as_mut_ptr(); let header_size = mem::size_of::<SlotHeader>(); let slot_size = mem::size_of::<T>() + header_size; Pool { item_type: PhantomData, buffer: ptr, buffer_size: mem.len(), tail: AtomicUsize::new(0), slot_size: slot_size, capacity: mem.len() / slot_size, header_size: header_size, free_list: LinkedList::new(), } } /// Remove all objects from the pool /// and zero the memory pub unsafe fn clear(&mut self) { let mut i = self.buffer.clone(); let end = self.buffer.clone().offset(self.buffer_size as isize); while i != end { *i = 0u8; i = i.offset(1); } } /// Fast copy a slot's contents to a new slot and return /// a pointer to the new slot pub fn alloc_with_contents_of(&mut self, other: &Arc<T>) -> Result<Arc<T>, &'static str> { let index = try!(self.claim_free_index()); unsafe { let from = self.raw_contents_for(other.index); let to = self.raw_contents_for(index); ptr::copy(from, to, mem::size_of::<T>()); } Ok(Arc::new(index, self)) } /// Try to allocate a new item from the pool. /// A mutable reference to the item is returned on success pub fn alloc(&mut self) -> Result<Arc<T>, &'static str> { let index = try!(self.internal_alloc()); Ok(Arc::new(index, self)) } // Increase the ref count for the cell at the given index pub fn retain(&mut self, index: usize) { let h = self.header_for(index); loop { let old = h.ref_count.load(Ordering::Relaxed); let swap = h.ref_count .compare_and_swap(old, old+1, Ordering::Relaxed); if swap == old { break } } } // Decrease the ref count for the cell at the given index pub fn release(&mut self, index: usize) { let mut is_free = false; { // Make the borrow checker happy let h = self.header_for(index); loop { let old = h.ref_count.load(Ordering::Relaxed); assert!(old > 0, "Release called on [{}] which has no refs!", index); let swap = h.ref_count .compare_and_swap(old, old-1, Ordering::Relaxed); if swap == old { if old == 1 { // this was the last reference is_free = true; } break } } } if is_free { self.free_list.push_back(index); } } /// Returns the number of live items. O(1) running time. 
pub fn live_count(&self) -> usize { self.tail.load(Ordering::Relaxed) - self.free_list.len() } } /// Internal Functions impl <T> Pool<T> { // Returns an item from the free list, or // tries to allocate a new one from the buffer fn claim_free_index(&mut self) -> Result<usize, &'static str> { let index = match self.free_list.pop_front() { Some(i) => i, None => try!(self.push_back_alloc()), }; self.retain(index); Ok(index) } // Internal alloc that does not create an Arc but still claims a slot fn internal_alloc(&mut self) -> Result<usize, &'static str> { let index = try!(self.claim_free_index()); Ok(index) } // Pushes the end of the used space in the buffer back // returns the previous index fn push_back_alloc(&mut self) -> Result<usize, &'static str> { loop { let old_tail = self.tail.load(Ordering::Relaxed); let swap = self.tail.compare_and_swap(old_tail, old_tail+1, Ordering::Relaxed); // If we were the ones to claim this slot, or // we've overrun the buffer, return if old_tail >= self.capacity { return Err("OOM") } else if swap == old_tail { return Ok(old_tail) } } } fn header_for<'a>(&'a mut self, i: usize) -> &'a mut SlotHeader { unsafe { let ptr = self.buffer.clone() .offset((i * self.slot_size) as isize); mem::transmute(ptr) } } fn raw_contents_for<'a>(&'a mut self, i: usize) -> *mut u8 { unsafe { self.buffer.clone() .offset((i * self.slot_size) as isize) .offset(self.header_size as isize) } } } impl <T> Index<usize> for Pool<T> { type Output = T; fn index<'a>(&'a self, i: usize) -> &'a T { unsafe { let ptr = self.buffer.clone() .offset((i * self.slot_size) as isize) .offset(self.header_size as isize); mem::transmute(ptr) } } } impl <T> IndexMut<usize> for Pool<T> { fn index_mut<'a>(&'a mut self, i: usize) -> &'a mut T { unsafe { let ptr = self.buffer.clone() .offset((i * self.slot_size) as isize) .offset(self.header_size as isize); mem::transmute(ptr) } } }<|fim▁end|>
pub unsafe fn retain(&mut self) { self.get_pool().retain(self.index); }
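The pool.rs sample above hands out ref-counted slots: alloc claims a slot from the free list or by bumping the tail, retain/release adjust a per-slot count, and a count that drops to zero puts the slot back on the free list. A compact Python sketch of that bookkeeping, ignoring the raw-memory, header-layout and atomics details of the original.

class SlotPool:
    def __init__(self, capacity):
        self.refs = [0] * capacity   # per-slot reference counts
        self.tail = 0                # one past the highest slot ever used
        self.free = []               # indices whose count dropped to zero

    def alloc(self):
        index = self.free.pop(0) if self.free else self._push_back()
        self.refs[index] += 1        # the new handle holds the first reference
        return index

    def _push_back(self):
        if self.tail >= len(self.refs):
            raise MemoryError("OOM")
        index, self.tail = self.tail, self.tail + 1
        return index

    def retain(self, index):
        self.refs[index] += 1

    def release(self, index):
        assert self.refs[index] > 0, "release called on a slot with no refs"
        self.refs[index] -= 1
        if self.refs[index] == 0:
            self.free.append(index)  # last reference gone: recycle the slot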
<|file_name|>gregorian.js<|end_file_name|><|fim▁begin|>define( //begin v1.x content { "field-quarter-short-relative+0": "tento štvrťr.", "field-quarter-short-relative+1": "budúci štvrťr.", "dayPeriods-standAlone-abbr-noon": "pol.", "field-tue-relative+-1": "minulý utorok", "field-year": "rok", "dateFormatItem-yw": "w. 'týždeň' 'v' 'roku' y", "dayPeriods-format-abbr-afternoon1": "popol.", "dateFormatItem-Hm": "H:mm", "field-wed-relative+0": "túto stredu", "field-wed-relative+1": "budúcu stredu", "dateFormatItem-ms": "mm:ss", "timeFormat-short": "H:mm", "field-minute": "minúta", "field-tue-narrow-relative+0": "tento ut.", "field-tue-narrow-relative+1": "budúci ut.", "field-thu-short-relative+0": "tento št.", "dateTimeFormat-short": "{1} {0}", "field-thu-short-relative+1": "budúci št.", "field-day-relative+0": "dnes", "field-day-relative+1": "zajtra", "field-day-relative+2": "pozajtra", "field-wed-narrow-relative+-1": "minulú st.", "field-year-narrow": "r.", "field-tue-relative+0": "tento utorok", "field-tue-relative+1": "budúci utorok", "field-second-short": "s", "dayPeriods-format-narrow-am": "AM", "dayPeriods-standAlone-abbr-morning1": "ráno", "dayPeriods-standAlone-abbr-morning2": "dopol.", "dateFormatItem-MMMd": "d. M.", "dayPeriods-format-narrow-morning1": "ráno", "dayPeriods-format-narrow-morning2": "dop.", "dayPeriods-format-abbr-am": "AM", "field-week-relative+0": "tento týždeň", "field-month-relative+0": "tento mesiac", "field-week-relative+1": "budúci týždeň", "field-month-relative+1": "budúci mesiac", "field-sun-narrow-relative+0": "túto ne.", "timeFormat-medium": "H:mm:ss", "field-mon-short-relative+0": "tento pond.", "field-sun-narrow-relative+1": "budúcu ne.", "field-mon-short-relative+1": "budúci pond.", "field-second-relative+0": "teraz", "months-standAlone-narrow": [ "j", "f", "m", "a", "m", "j", "j", "a", "s", "o", "n", "d" ], "dayPeriods-standAlone-wide-noon": "poludnie", "eraNames": [ "pred Kristom", "po Kristovi" ], "dayPeriods-standAlone-abbr-pm": "PM", "field-month-short": "mes.", "dateFormatItem-GyMMMEd": "E, d. M. y G", "field-day": "deň", "dayPeriods-standAlone-wide-night1": "noc", "field-year-relative+-1": "minulý rok", "dayPeriods-format-wide-am": "AM", "field-sat-short-relative+-1": "minulú so.", "dayPeriods-format-narrow-afternoon1": "pop.", "dayPeriods-format-wide-afternoon1": "popoludní", "field-hour-relative+0": "v tejto hodine", "field-wed-relative+-1": "minulú stredu", "dateTimeFormat-medium": "{1}, {0}", "field-sat-narrow-relative+-1": "minulú so.", "field-second": "sekunda", "days-standAlone-narrow": [ "n", "p", "u", "s", "š", "p", "s" ], "dayPeriods-standAlone-narrow-noon": "pol.", "dayPeriods-standAlone-wide-pm": "PM", "dateFormatItem-Ehms": "E h:mm:ss a", "dateFormat-long": "d. MMMM y", "dateFormatItem-GyMMMd": "d. M. y G", "dayPeriods-standAlone-abbr-midnight": "poln.", "field-quarter": "štvrťrok", "field-week-short": "týž.", "dayPeriods-format-narrow-midnight": "o poln.", "dateFormatItem-yMMMEd": "E d. M. y", "quarters-standAlone-wide": [ "1. štvrťrok", "2. štvrťrok", "3. štvrťrok", "4. 
štvrťrok" ], "days-format-narrow": [ "n", "p", "u", "s", "š", "p", "s" ], "dayPeriods-format-wide-evening1": "večer", "dateTimeFormats-appendItem-Timezone": "{0} {1}", "field-tue-short-relative+0": "tento utor.", "field-tue-short-relative+1": "budúci utor.", "field-mon-relative+-1": "minulý pondelok", "dateFormatItem-GyMMM": "LLLL y G", "field-month": "mesiac", "field-day-narrow": "d.", "dayPeriods-standAlone-abbr-night1": "noc", "dayPeriods-standAlone-narrow-afternoon1": "pop.", "dateFormatItem-MMM": "LLL", "field-minute-short": "min", "field-dayperiod": "časť dňa", "field-sat-short-relative+0": "túto so.", "field-sat-short-relative+1": "budúcu so.", "dayPeriods-format-narrow-pm": "PM", "dateFormat-medium": "d. M. y", "eraAbbr": [ "pred Kr.", "po Kr." ], "quarters-standAlone-abbr": [ "Q1", "Q2", "Q3", "Q4" ], "dayPeriods-format-abbr-pm": "PM", "field-second-narrow": "s", "field-mon-relative+0": "tento pondelok", "dayPeriods-standAlone-narrow-night1": "noc", "field-mon-relative+1": "budúci pondelok", "field-year-short": "r.", "months-format-narrow": [ "j", "f", "m", "a", "m", "j", "j", "a", "s", "o", "n", "d" ], "dayPeriods-format-wide-morning1": "ráno", "dayPeriods-format-wide-morning2": "dopoludnia", "field-quarter-relative+-1": "minulý štvrťrok", "dayPeriods-standAlone-narrow-am": "AM", "days-format-short": [ "ne", "po", "ut", "st", "št", "pi", "so" ], "quarters-format-narrow": [ "1", "2", "3", "4" ], "dayPeriods-format-wide-pm": "PM", "field-sat-relative+-1": "minulú sobotu", "dateTimeFormat-long": "{1}, {0}", "dateFormatItem-Md": "d. M.", "field-hour": "hodina", "dateFormatItem-yQQQQ": "QQQQ y", "months-format-wide": [ "januára", "februára", "marca", "apríla", "mája", "júna", "júla", "augusta", "septembra", "októbra", "novembra", "decembra" ], "dayPeriods-format-wide-night1": "v noci", "dateFormat-full": "EEEE, d. MMMM y", "field-month-relative+-1": "minulý mesiac", "dateFormatItem-Hms": "H:mm:ss", "field-quarter-short": "Q", "field-sat-narrow-relative+0": "túto so.", "dateFormatItem-Hmv": "H:mm v", "field-fri-relative+0": "tento piatok", "field-sat-narrow-relative+1": "budúcu so.", "field-fri-relative+1": "budúci piatok", "dayPeriods-format-narrow-noon": "nap.", "field-sun-short-relative+0": "túto ned.", "field-sun-short-relative+1": "budúcu ned.", "field-week-relative+-1": "minulý týždeň", "field-quarter-short-relative+-1": "minulý štvrťr.", "dateFormatItem-Ehm": "E h:mm a", "months-format-abbr": [ "jan", "feb", "mar", "apr", "máj", "jún", "júl", "aug", "sep", "okt", "nov", "dec" ], "dayPeriods-format-wide-midnight": "o polnoci", "field-quarter-relative+0": "tento štvrťrok", "field-minute-relative+0": "v tejto minúte", "timeFormat-long": "H:mm:ss z", "field-quarter-relative+1": "budúci štvrťrok", "field-wed-short-relative+-1": "minulú str.", "dateFormatItem-yMMM": "M/y", "dateFormat-short": "d. M. y", "field-thu-short-relative+-1": "minulý št.", "dayPeriods-format-abbr-night1": "v noci", "dateFormatItem-MMMMW": "W. 'týždeň' 'v' MMM", "days-standAlone-wide": [ "nedeľa", "pondelok",<|fim▁hole|> "piatok", "sobota" ], "field-mon-narrow-relative+-1": "minulý po.", "dateFormatItem-MMMMd": "d. 
MMMM", "field-thu-narrow-relative+-1": "minulý št.", "dateFormatItem-E": "ccc", "dateFormatItem-mmss": "mm:ss", "dateFormatItem-H": "H", "field-tue-narrow-relative+-1": "minulý ut.", "dayPeriods-format-abbr-evening1": "večer", "dayPeriods-standAlone-narrow-pm": "PM", "dateFormatItem-M": "L.", "months-standAlone-wide": [ "január", "február", "marec", "apríl", "máj", "jún", "júl", "august", "september", "október", "november", "december" ], "field-wed-short-relative+0": "túto str.", "field-wed-short-relative+1": "budúcu str.", "dateFormatItem-Hmsv": "H:mm:ss v", "field-sun-relative+-1": "minulú nedeľu", "dateFormatItem-MMMMEd": "E d. MMMM", "days-standAlone-abbr": [ "ne", "po", "ut", "st", "št", "pi", "so" ], "dateTimeFormat-full": "{1}, {0}", "dateFormatItem-hm": "h:mm a", "dateFormatItem-d": "d.", "field-weekday": "deň týždňa", "dayPeriods-standAlone-wide-evening1": "večer", "field-quarter-narrow-relative+0": "tento štvrťr.", "field-sat-relative+0": "túto sobotu", "dateFormatItem-h": "h a", "field-quarter-narrow-relative+1": "budúci štvrťr.", "field-sat-relative+1": "budúcu sobotu", "months-standAlone-abbr": [ "jan", "feb", "mar", "apr", "máj", "jún", "júl", "aug", "sep", "okt", "nov", "dec" ], "dateFormatItem-hmsv": "h:mm:ss a v", "dayPeriods-format-abbr-morning1": "ráno", "dayPeriods-standAlone-narrow-evening1": "več.", "dayPeriods-format-abbr-morning2": "dopol.", "timeFormat-full": "H:mm:ss zzzz", "dateFormatItem-MEd": "E d. M.", "dateFormatItem-y": "y", "field-thu-narrow-relative+0": "tento št.", "field-sun-narrow-relative+-1": "minulú ne.", "field-mon-short-relative+-1": "minulý pond.", "field-thu-narrow-relative+1": "budúci št.", "field-thu-relative+0": "tento štvrtok", "dayPeriods-standAlone-abbr-afternoon1": "popol.", "field-thu-relative+1": "budúci štvrtok", "dateFormatItem-hms": "h:mm:ss a", "field-fri-short-relative+-1": "minulý pi.", "dateFormatItem-hmv": "h:mm a v", "dayPeriods-format-abbr-noon": "napol.", "field-thu-relative+-1": "minulý štvrtok", "dateFormatItem-yMd": "d. M. y", "quarters-standAlone-narrow": [ "1", "2", "3", "4" ], "field-week": "týždeň", "quarters-format-wide": [ "1. štvrťrok", "2. štvrťrok", "3. štvrťrok", "4. štvrťrok" ], "dateFormatItem-Ed": "E d.", "field-wed-narrow-relative+0": "túto st.", "field-wed-narrow-relative+1": "budúcu st.", "dayPeriods-standAlone-wide-morning1": "ráno", "dayPeriods-standAlone-wide-morning2": "dopoludnie", "field-quarter-narrow-relative+-1": "minulý štvrťr.", "field-fri-short-relative+0": "tento pi.", "field-fri-short-relative+1": "budúci pi.", "days-standAlone-short": [ "ne", "po", "ut", "st", "št", "pi", "so" ], "dayPeriods-standAlone-narrow-morning1": "ráno", "dayPeriods-standAlone-narrow-morning2": "dop.", "dateFormatItem-GyMMMMd": "d. M. y G", "dayPeriods-format-abbr-midnight": "o poln.", "field-hour-short": "h", "quarters-format-abbr": [ "Q1", "Q2", "Q3", "Q4" ], "field-month-narrow": "mes.", "field-hour-narrow": "h", "field-fri-narrow-relative+-1": "minulý pi.", "field-year-relative+0": "tento rok", "field-year-relative+1": "budúci rok", "field-fri-relative+-1": "minulý piatok", "eraNarrow": [ "pred Kr.", "po Kr." ], "dayPeriods-format-wide-noon": "napoludnie", "field-tue-short-relative+-1": "minulý utor.", "field-minute-narrow": "min", "dayPeriods-standAlone-wide-afternoon1": "popoludnie", "dateFormatItem-yQQQ": "QQQ y", "days-format-wide": [ "nedeľa", "pondelok", "utorok", "streda", "štvrtok", "piatok", "sobota" ], "dateFormatItem-yMMMMd": "d. 
MMMM y", "field-mon-narrow-relative+0": "tento po.", "dateFormatItem-EHm": "E HH:mm", "field-mon-narrow-relative+1": "budúci po.", "dayPeriods-standAlone-wide-midnight": "polnoc", "dateFormatItem-yM": "M/y", "field-zone": "časové pásmo", "dateFormatItem-yMMMM": "LLLL y", "dateFormatItem-MMMEd": "E d. M.", "dateFormatItem-EHms": "E HH:mm:ss", "dateFormatItem-yMEd": "E d. M. y", "dayPeriods-standAlone-narrow-midnight": "poln.", "field-quarter-narrow": "Q", "dayPeriods-standAlone-abbr-am": "AM", "field-day-relative+-1": "včera", "field-sun-short-relative+-1": "minulú ned.", "field-day-relative+-2": "predvčerom", "dayPeriods-format-narrow-night1": "v n.", "days-format-abbr": [ "ne", "po", "ut", "st", "št", "pi", "so" ], "field-sun-relative+0": "túto nedeľu", "field-sun-relative+1": "budúcu nedeľu", "dateFormatItem-yMMMd": "d. M. y", "dateFormatItem-Gy": "y G", "field-era": "letopočet", "field-week-narrow": "týž.", "field-day-short": "d.", "dayPeriods-standAlone-abbr-evening1": "večer", "field-fri-narrow-relative+0": "tento pi.", "dayPeriods-format-narrow-evening1": "več.", "field-fri-narrow-relative+1": "budúci pi.", "dayPeriods-standAlone-wide-am": "AM" } //end v1.x content );<|fim▁end|>
"utorok", "streda", "štvrtok",
<|file_name|>EmptyMain.tsx<|end_file_name|><|fim▁begin|>/** * * @license * Copyright (C) 2017 Joseph Roque * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @author Joseph Roque * @created 2017-08-26 * @file EmptyMain.tsx<|fim▁hole|>// React imports import React from 'react'; import { StyleSheet, View, } from 'react-native'; // Imports import * as Constants from '../../constants'; export function renderEmptyMain(): JSX.Element { return ( <View style={_styles.container}> <View style={_styles.header} /> <View style={_styles.separator} /> <View style={_styles.innerContainer} /> <View style={_styles.tabBar} /> </View> ); } // Private styles for component const _styles = StyleSheet.create({ container: { backgroundColor: Constants.Colors.primaryBackground, flex: 1, }, header: { backgroundColor: Constants.Colors.primaryBackground, height: 45, }, innerContainer: { backgroundColor: Constants.Colors.darkTransparentBackground, flex: 1, }, separator: { backgroundColor: Constants.Colors.tertiaryBackground, height: StyleSheet.hairlineWidth, }, tabBar: { backgroundColor: Constants.Colors.tertiaryBackground, borderTopColor: 'rgba(0, 0, 0, 0.25)', borderTopWidth: 1, height: 55, }, });<|fim▁end|>
* @description Displays a false version of the Main view while the app loads in the background */ 'use strict';
<|file_name|>operations.go<|end_file_name|><|fim▁begin|>package storage // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "context" "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/azure" "github.com/Azure/go-autorest/tracing" "net/http" ) // OperationsClient is the the Azure Storage Management API. type OperationsClient struct { BaseClient } // NewOperationsClient creates an instance of the OperationsClient client. func NewOperationsClient(subscriptionID string) OperationsClient { return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID) } // NewOperationsClientWithBaseURI creates an instance of the OperationsClient client using a custom endpoint. Use this // when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient { return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)} } // List lists all of the available Storage Rest API operations. func (client OperationsClient) List(ctx context.Context) (result OperationListResult, err error) {<|fim▁hole|> ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListPreparer(ctx) if err != nil { err = autorest.NewErrorWithError(err, "storage.OperationsClient", "List", nil, "Failure preparing request") return } resp, err := client.ListSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "storage.OperationsClient", "List", resp, "Failure sending request") return } result, err = client.ListResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "storage.OperationsClient", "List", resp, "Failure responding to request") return } return } // ListPreparer prepares the List request. func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) { const APIVersion = "2017-06-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPath("/providers/Microsoft.Storage/operations"), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListSender sends the List request. The method will close the // http.Response Body if it receives an error. func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) { return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) } // ListResponder handles the response to the List request. The method always // closes the http.Response Body. func (client OperationsClient) ListResponder(resp *http.Response) (result OperationListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return }<|fim▁end|>
if tracing.IsEnabled() {
<|file_name|>introspect_panel.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import ibus import dbus<|fim▁hole|>e = ibus.interface.IPanel() print e.Introspect("/", bus)<|fim▁end|>
bus = dbus.SessionBus()
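introspect_panel.py above grabs the session bus and calls Introspect on an ibus panel interface. For comparison, a small hedged sketch of ordinary D-Bus introspection with dbus-python; the bus name and object path here are the standard org.freedesktop.DBus ones, not anything from ibus.

import dbus

bus = dbus.SessionBus()
obj = bus.get_object("org.freedesktop.DBus", "/org/freedesktop/DBus")
introspectable = dbus.Interface(obj, "org.freedesktop.DBus.Introspectable")
xml = introspectable.Introspect()   # returns the interface description as XML
print(xml[:200])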
<|file_name|>index.js<|end_file_name|><|fim▁begin|>// Declare internals <|fim▁hole|> // Plugin registration exports.register = function (plugin, options, next) { plugin.route({ path: '/test2', method: 'GET', handler: function (request, reply) { reply('testing123'); } }); plugin.route({ path: '/test2/path', method: 'GET', handler: function (request, reply) { reply(plugin.path); } }); plugin.log('test', 'abc'); return next(); };<|fim▁end|>
var internals = {};
<|file_name|>dot.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2015, The Radare Project. All rights reserved. // See the COPYING file at the top-level directory of this distribution. // Licensed under the BSD 3-Clause License: // <http://opensource.org/licenses/BSD-3-Clause> // This file may not be copied, modified, or distributed // except according to those terms. //! Graph visualization traits and functions to emit dot code. use std::cmp::Eq; use std::collections::HashMap; use std::fmt::Debug; use std::hash::Hash; use petgraph::graph::NodeIndex; #[allow(unused_macros)] macro_rules! add_strings { ( $( $x: expr ),* ) => { { let mut s = String::new(); $( s.push_str(&format!("{}", $x)); )* s } }; } /// Represents the contents of a `GraphViz` attribute block pub enum DotAttrBlock { /// The attribute block as string including the surrounding square brackets. /// Values have to be escaped manually. Raw(String), /// List of key-value pairs. /// Values will be escaped for you. Attributes(Vec<(String, String)>), /// Represents one line in a dot file Hybrid(String, Vec<(String, String)>), } impl DotAttrBlock { fn bake(&mut self) -> &String { let mut r = String::new(); let attr = if let DotAttrBlock::Hybrid(ref s, ref attr) = *self { r.push_str(s); attr.clone() } else { Vec::new() }; if !attr.is_empty() { *self = DotAttrBlock::Attributes(attr); } let s: String = match *self { DotAttrBlock::Raw(ref l) => return l, DotAttrBlock::Attributes(ref attrs) => { if attrs.is_empty() { "".to_owned() } else { let mut t = " [".to_string(); for &(ref k, ref v) in attrs { t.push_str(&*format!(" {}={}", k, v)); } t.push_str(" ]"); t } } _ => unreachable!(), }; r.push_str(&s); r.push_str(";\n"); *self = DotAttrBlock::Raw(r); if let DotAttrBlock::Raw(ref r) = *self { return r; } unreachable!(); } } pub trait Index { fn to_index(&self) -> usize; } impl Index for NodeIndex { fn to_index(&self) -> usize { self.index() } } /// This trait enables graphs to be generated from implementors. 
pub trait GraphDot { type NodeIndex: Hash + Clone + Eq + Index + Debug; type EdgeIndex: Hash + Clone + Eq; fn node_index_new(usize) -> Self::NodeIndex; fn edge_index_new(usize) -> Self::EdgeIndex; fn configure(&self) -> String; fn node_count(&self) -> usize; fn edge_count(&self) -> usize; fn nodes(&self) -> Vec<Self::NodeIndex>; fn edges(&self) -> Vec<Self::EdgeIndex>; // fn get_node(&self, n: usize) -> Option<&Self::NodeType>; /// Nodes with the same node_cluster return value will be put in the same /// graphviz-cluster.<|fim▁hole|> } fn node_skip(&self, &Self::NodeIndex) -> bool { false } fn node_attrs(&self, &Self::NodeIndex) -> DotAttrBlock; fn edge_skip(&self, &Self::EdgeIndex) -> bool { false } fn edge_attrs(&self, &Self::EdgeIndex) -> DotAttrBlock; fn edge_source(&self, &Self::EdgeIndex) -> Self::NodeIndex; fn edge_target(&self, &Self::EdgeIndex) -> Self::NodeIndex; } pub fn emit_dot<T: GraphDot>(g: &T) -> String { let mut result = String::new(); result.push_str(&*g.configure()); // Node configurations { let nodes = g.nodes(); let mut clustermap = HashMap::<T::NodeIndex, Vec<T::NodeIndex>>::new(); for i in &nodes { let block = g.node_cluster(i).unwrap_or_else(|| { radeco_err!("Block not found"); 0 }); clustermap .entry(T::node_index_new(block)) .or_insert_with(Vec::new) .push(i.clone()); } for (k, v) in &clustermap { result.push_str(&*format!("subgraph cluster_{} {{\n", k.to_index())); result.push_str("style=filled;\n"); result.push_str("fillcolor=gray;\n"); result.push_str("rankdir=TB;\n"); for node in v.iter() { result.push_str(&*g.node_attrs(node).bake()); } result.push_str("}\n"); } } // Connect nodes by edges. for edge_i in g.edges() { if g.edge_skip(&edge_i) { continue; } result.push_str(g.edge_attrs(&edge_i).bake()); } result.push_str("\n}\n"); result }<|fim▁end|>
fn node_cluster(&self, _: &Self::NodeIndex) -> Option<usize> { Some(0)
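The dot.rs sample above groups nodes by node_cluster() into subgraph cluster_N blocks and then appends one edge line per edge. A minimal Python sketch of the same emit_dot idea; the data shapes and names are invented for the illustration, not taken from the crate.

def emit_dot(clusters, edges):
    # clusters: {cluster_id: [node_id, ...]}, edges: [(source, target), ...]
    lines = ["digraph g {"]
    for cluster_id, nodes in clusters.items():
        lines.append("subgraph cluster_%d {" % cluster_id)
        lines.append("style=filled; fillcolor=gray; rankdir=TB;")
        for node in nodes:
            lines.append('n%d [label="n%d"];' % (node, node))
        lines.append("}")
    for source, target in edges:
        lines.append("n%d -> n%d;" % (source, target))
    lines.append("}")
    return "\n".join(lines)

print(emit_dot({0: [0, 1], 1: [2]}, [(0, 1), (1, 2)]))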
<|file_name|>writer.go<|end_file_name|><|fim▁begin|>package goyaml2 import (<|fim▁hole|>) func Write(w io.Writer, v interface{}) error { return nil }<|fim▁end|>
"io"
<|file_name|>contact.py<|end_file_name|><|fim▁begin|>"""Basic contact management functions. Contacts are linked to monitors and are used to determine where to send alerts for monitors. Contacts are basic name/email/phone sets. Contacts are only stored in the database and not in memory, they are loaded from the database each time an alert is sent. """ from typing import Dict, Iterable, Optional, Any, Set from irisett.sql import DBConnection, Cursor from irisett import ( errors, object_models, ) from irisett.object_exists import ( contact_exists, active_monitor_exists, contact_group_exists, ) async def create_contact(dbcon: DBConnection, name: Optional[str], email: Optional[str], phone: Optional[str], active: bool) -> str: """Add a contact to the database.""" q = """insert into contacts (name, email, phone, active) values (%s, %s, %s, %s)""" q_args = (name, email, phone, active) contact_id = await dbcon.operation(q, q_args) return contact_id async def update_contact(dbcon: DBConnection, contact_id: int, data: Dict[str, str]) -> None: """Update a contacts information in the database. Data is a dict with name/email/phone/active values that will be updated. """ async def _run(cur: Cursor) -> None: for key, value in data.items(): if key not in ['name', 'email', 'phone', 'active']: raise errors.IrisettError('invalid contact key %s' % key) q = """update contacts set %s=%%s where id=%%s""" % key q_args = (value, contact_id) await cur.execute(q, q_args) if not await contact_exists(dbcon, contact_id): raise errors.InvalidArguments('contact does not exist') await dbcon.transact(_run) async def delete_contact(dbcon: DBConnection, contact_id: int) -> None: """Remove a contact from the database.""" if not await contact_exists(dbcon, contact_id): raise errors.InvalidArguments('contact does not exist') q = """delete from contacts where id=%s""" await dbcon.operation(q, (contact_id,)) async def create_contact_group(dbcon: DBConnection, name: str, active: bool) -> str: """Add a contact group to the database.""" q = """insert into contact_groups (name, active) values (%s, %s)""" q_args = (name, active) contact_group_id = await dbcon.operation(q, q_args) return contact_group_id async def update_contact_group(dbcon: DBConnection, contact_group_id: int, data: Dict[str, str]) -> None: """Update a contact groups information in the database. Data is a dict with name/active values that will be updated. """ async def _run(cur: Cursor) -> None: for key, value in data.items(): if key not in ['name', 'active']:<|fim▁hole|> raise errors.IrisettError('invalid contact key %s' % key) q = """update contact_groups set %s=%%s where id=%%s""" % key q_args = (value, contact_group_id) await cur.execute(q, q_args) if not await contact_group_exists(dbcon, contact_group_id): raise errors.InvalidArguments('contact group does not exist') await dbcon.transact(_run) async def delete_contact_group(dbcon: DBConnection, contact_group_id: int) -> None: """Remove a contact group from the database.""" if not await contact_group_exists(dbcon, contact_group_id): raise errors.InvalidArguments('contact group does not exist') q = """delete from contact_groups where id=%s""" await dbcon.operation(q, (contact_group_id,)) async def get_all_contacts_for_active_monitor(dbcon: DBConnection, monitor_id: int) -> Iterable[object_models.Contact]: """Get a list of all contacts for an active monitor. This includes directly attached contacts, contacts from contact groups, monitor groups etc. 
""" contacts = set() contacts.update(await _active_monitor_contacts(dbcon, monitor_id)) contacts.update(await _active_monitor_contact_groups(dbcon, monitor_id)) contacts.update(await _active_monitor_monitor_group_contacts(dbcon, monitor_id)) contacts.update(await _active_monitor_monitor_group_contact_groups(dbcon, monitor_id)) return list(contacts) async def _active_monitor_contacts(dbcon: DBConnection, monitor_id: int) -> Set[object_models.Contact]: # Get contacts directly connected to the monitor. q = """select contacts.id, contacts.name, contacts.email, contacts.phone, contacts.active from active_monitor_contacts, contacts where active_monitor_contacts.active_monitor_id = %s and active_monitor_contacts.contact_id = contacts.id and contacts.active = true""" return {object_models.Contact(*row) for row in await dbcon.fetch_all(q, (monitor_id,))} async def _active_monitor_contact_groups(dbcon: DBConnection, monitor_id: int) -> Set[object_models.Contact]: # Get contacts connected to the monitor via a contact group. q = """select contacts.id, contacts.name, contacts.email, contacts.phone, contacts.active from active_monitor_contact_groups, contact_groups, contact_group_contacts, contacts where active_monitor_contact_groups.active_monitor_id = %s and active_monitor_contact_groups.contact_group_id = contact_groups.id and contact_groups.active = true and contact_groups.id = contact_group_contacts.contact_group_id and contact_group_contacts.contact_id = contacts.id and contacts.active = true""" return {object_models.Contact(*row) for row in await dbcon.fetch_all(q, (monitor_id,))} async def _active_monitor_monitor_group_contacts(dbcon: DBConnection, monitor_id: int) -> Set[object_models.Contact]: # Get contacts connected to the monitor via monitor group -> contacts q = """select contacts.id, contacts.name, contacts.email, contacts.phone, contacts.active from monitor_group_active_monitors left join monitor_groups on monitor_group_active_monitors.monitor_group_id=monitor_groups.id left join monitor_group_contacts on monitor_group_contacts.monitor_group_id=monitor_groups.id left join contacts on contacts.id=monitor_group_contacts.contact_id where monitor_group_active_monitors.active_monitor_id=%s and contacts.active = true""" return {object_models.Contact(*row) for row in await dbcon.fetch_all(q, (monitor_id,))} async def _active_monitor_monitor_group_contact_groups( dbcon: DBConnection, monitor_id: int) -> Set[object_models.Contact]: # Get contacts connected to the monitor via monitor group -> contact group -> contacts q = """select contacts.id, contacts.name, contacts.email, contacts.phone, contacts.active from monitor_group_active_monitors left join monitor_groups on monitor_group_active_monitors.monitor_group_id=monitor_groups.id left join monitor_group_contact_groups on monitor_group_contact_groups.monitor_group_id=monitor_groups.id left join contact_groups on contact_groups.id=monitor_group_contact_groups.contact_group_id left join contact_group_contacts on contact_group_contacts.contact_group_id=contact_groups.id left join contacts on contacts.id=contact_group_contacts.contact_id where monitor_group_active_monitors.active_monitor_id=%s and contact_groups.active=true and contacts.active=true""" return {object_models.Contact(*row) for row in await dbcon.fetch_all(q, (monitor_id,))} async def get_contact_dict_for_active_monitor(dbcon: DBConnection, monitor_id: int) -> Dict[str, set]: """Get all contact addresses/numbers for a specific active monitor. 
Return: Dict[str, Set(str)] for 'email' and 'phone'. """ ret = { 'email': set(), 'phone': set(), } # type: Dict[str, set] contacts = await get_all_contacts_for_active_monitor(dbcon, monitor_id) for contact in contacts: if contact.email: ret['email'].add(contact.email) if contact.phone: ret['phone'].add(contact.phone) return ret async def add_contact_to_active_monitor(dbcon: DBConnection, contact_id: int, monitor_id: int) -> None: """Connect a contact and an active monitor.""" if not await active_monitor_exists(dbcon, monitor_id): raise errors.InvalidArguments('monitor does not exist') if not await contact_exists(dbcon, contact_id): raise errors.InvalidArguments('contact does not exist') q = """replace into active_monitor_contacts (active_monitor_id, contact_id) values (%s, %s)""" q_args = (monitor_id, contact_id) await dbcon.operation(q, q_args) async def delete_contact_from_active_monitor(dbcon: DBConnection, contact_id: int, monitor_id: int) -> None: """Disconnect a contact and an active monitor.""" q = """delete from active_monitor_contacts where active_monitor_id=%s and contact_id=%s""" q_args = (monitor_id, contact_id) await dbcon.operation(q, q_args) async def set_active_monitor_contacts(dbcon: DBConnection, contact_ids: Iterable[int], monitor_id: int): """(Re-)set contacts for an active monitor. Delete existing contacts for an active monitor and set the given new contacts. """ async def _run(cur: Cursor) -> None: q = """delete from active_monitor_contacts where active_monitor_id=%s""" await cur.execute(q, (monitor_id,)) for contact_id in contact_ids: q = """insert into active_monitor_contacts (active_monitor_id, contact_id) values (%s, %s)""" q_args = (monitor_id, contact_id) await cur.execute(q, q_args) if not await active_monitor_exists(dbcon, monitor_id): raise errors.InvalidArguments('monitor does not exist') await dbcon.transact(_run) async def get_contacts_for_active_monitor(dbcon: DBConnection, monitor_id: int) -> Iterable[object_models.Contact]: """Get contacts for an active monitor. Return a list of dicts, one dict describing each contacts information. 
""" q = """select contacts.id, contacts.name, contacts.email, contacts.phone, contacts.active from active_monitor_contacts, contacts where active_monitor_contacts.active_monitor_id = %s and active_monitor_contacts.contact_id = contacts.id""" contacts = [object_models.Contact(*row) for row in await dbcon.fetch_all(q, (monitor_id,))] return contacts async def add_contact_group_to_active_monitor(dbcon: DBConnection, contact_group_id: int, monitor_id: int) -> None: """Connect a contact group and an active monitor.""" if not await active_monitor_exists(dbcon, monitor_id): raise errors.InvalidArguments('monitor does not exist') if not await contact_group_exists(dbcon, contact_group_id): raise errors.InvalidArguments('contact does not exist') q = """replace into active_monitor_contact_groups (active_monitor_id, contact_group_id) values (%s, %s)""" q_args = (monitor_id, contact_group_id) await dbcon.operation(q, q_args) async def delete_contact_group_from_active_monitor(dbcon: DBConnection, contact_group_id: int, monitor_id: int) -> None: """Disconnect a contact group and an active monitor.""" q = """delete from active_monitor_contact_groups where active_monitor_id=%s and contact_group_id=%s""" q_args = (monitor_id, contact_group_id) await dbcon.operation(q, q_args) async def set_active_monitor_contact_groups(dbcon: DBConnection, contact_group_ids: Iterable[int], monitor_id: int) -> None: """(Re-)set contact_groups for an active monitor. Delete existing contact groups for an active monitor and set the given new contact groups. """ async def _run(cur: Cursor) -> None: q = """delete from active_monitor_contact_groups where active_monitor_id=%s""" await cur.execute(q, (monitor_id,)) for contact_group_id in contact_group_ids: q = """insert into active_monitor_contact_groups (active_monitor_id, contact_group_id) values (%s, %s)""" q_args = (monitor_id, contact_group_id) await cur.execute(q, q_args) if not await active_monitor_exists(dbcon, monitor_id): raise errors.InvalidArguments('monitor does not exist') await dbcon.transact(_run) async def get_contact_groups_for_active_monitor( dbcon: DBConnection, monitor_id: int) -> Iterable[object_models.ContactGroup]: """Get contact groups for an active monitor.""" q = """select contact_groups.id, contact_groups.name, contact_groups.active from active_monitor_contact_groups, contact_groups where active_monitor_contact_groups.active_monitor_id = %s and active_monitor_contact_groups.contact_group_id = contact_groups.id""" return [object_models.ContactGroup(*row) for row in await dbcon.fetch_all(q, (monitor_id,))] async def get_all_contacts(dbcon: DBConnection) -> Iterable[object_models.Contact]: """Get all contacts""" q = """select id, name, email, phone, active from contacts""" return [object_models.Contact(*row) for row in await dbcon.fetch_all(q)] async def get_contact(dbcon: DBConnection, id: int) -> Any: # Use any because optional returns suck. 
"""Get a single contact if it exists.""" q = """select id, name, email, phone, active from contacts where id=%s""" q_args = (id,) row = await dbcon.fetch_row(q, q_args) contact = None if row: contact = object_models.Contact(*row) return contact async def get_contacts_for_metadata( dbcon: DBConnection, meta_key: str, meta_value: str) -> Iterable[object_models.Contact]: q = """select c.id, c.name, c.email, c.phone, c.active from contacts as c, object_metadata as meta where meta.key=%s and meta.value=%s and meta.object_type="contact" and meta.object_id=c.id""" q_args = (meta_key, meta_value) return [object_models.Contact(*row) for row in await dbcon.fetch_all(q, q_args)] async def add_contact_to_contact_group(dbcon: DBConnection, contact_group_id: int, contact_id: int) -> None: """Connect a contact and a contact group.""" if not await contact_group_exists(dbcon, contact_group_id): raise errors.InvalidArguments('contact group does not exist') if not await contact_exists(dbcon, contact_id): raise errors.InvalidArguments('contact does not exist') q = """replace into contact_group_contacts (contact_group_id, contact_id) values (%s, %s)""" q_args = (contact_group_id, contact_id) await dbcon.operation(q, q_args) async def delete_contact_from_contact_group(dbcon: DBConnection, contact_group_id: int, contact_id: int) -> None: """Disconnect a contact and a contact_group.""" q = """delete from contact_group_contacts where contact_group_id=%s and contact_id=%s""" q_args = (contact_group_id, contact_id) await dbcon.operation(q, q_args) async def set_contact_group_contacts(dbcon: DBConnection, contact_group_id: int, contact_ids: Iterable[int]) -> None: """(Re-)set contacts for a contact group. Delete existing contacts for a contact group and set the given new contacts. """ async def _run(cur: Cursor) -> None: q = """delete from contact_group_contacts where contact_group_id=%s""" await cur.execute(q, (contact_group_id,)) for contact_id in contact_ids: q = """insert into contact_group_contacts (contact_group_id, contact_id) values (%s, %s)""" q_args = (contact_group_id, contact_id) await cur.execute(q, q_args) if not await contact_group_exists(dbcon, contact_group_id): raise errors.InvalidArguments('contact group does not exist') await dbcon.transact(_run) async def get_contacts_for_contact_group(dbcon: DBConnection, contact_group_id: int) -> Iterable[object_models.Contact]: """Get contacts for a contact group.""" q = """select contacts.id, contacts.name, contacts.email, contacts.phone, contacts.active from contact_group_contacts, contacts where contact_group_contacts.contact_group_id = %s and contact_group_contacts.contact_id = contacts.id""" return [object_models.Contact(*row) for row in await dbcon.fetch_all(q, (contact_group_id,))] async def get_all_contact_groups(dbcon: DBConnection) -> Iterable[object_models.ContactGroup]: q = """select id, name, active from contact_groups""" contact_groups = [object_models.ContactGroup(*row) for row in await dbcon.fetch_all(q)] return contact_groups async def get_contact_group(dbcon: DBConnection, id: int) -> Any: # Use any because optional returns suck. """Get a single contact if it exists. Return a list of dicts, one dict describing each contacts information. 
""" q = """select id, name, active from contact_groups where id=%s""" row = await dbcon.fetch_row(q, (id,)) contact = None if row: contact = object_models.ContactGroup(*row) return contact async def get_contact_groups_for_metadata( dbcon: DBConnection, meta_key: str, meta_value: str) -> Iterable[object_models.ContactGroup]: q = """select cg.id, cg.name, cg.active from contact_groups as cg, object_metadata as meta where meta.key=%s and meta.value=%s and meta.object_type="contact_group" and meta.object_id=cg.id""" q_args = (meta_key, meta_value) return [object_models.ContactGroup(*row) for row in await dbcon.fetch_all(q, q_args)]<|fim▁end|>
<|file_name|>synology.py<|end_file_name|><|fim▁begin|>import json import traceback from couchpotato.core._base.downloader.main import DownloaderBase from couchpotato.core.helpers.encoding import isInt from couchpotato.core.helpers.variable import cleanHost from couchpotato.core.logger import CPLog import requests log = CPLog(__name__) autoload = 'Synology' class Synology(DownloaderBase): protocol = ['nzb', 'torrent', 'torrent_magnet'] status_support = False def download(self, data = None, media = None, filedata = None): """ Send a torrent/nzb file to the downloader :param data: dict returned from provider Contains the release information :param media: media dict with information Used for creating the filename when possible :param filedata: downloaded torrent/nzb filedata The file gets downloaded in the searcher and send to this function This is done to have fail checking before using the downloader, so the downloader doesn't need to worry about that :return: boolean One fail returns false, but the downloader should log his own errors """ if not media: media = {} if not data: data = {} response = False log.info('Sending "%s" (%s) to Synology.', (data['name'], data['protocol'])) # Load host from config and split out port. host = cleanHost(self.conf('host'), protocol = False).split(':') if not isInt(host[1]): log.error('Config properties are not filled in correctly, port is missing.')<|fim▁hole|> srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password'), self.conf('destination')) if data['protocol'] == 'torrent_magnet': log.info('Adding torrent URL %s', data['url']) response = srpc.create_task(url = data['url']) elif data['protocol'] in ['nzb', 'torrent']: log.info('Adding %s' % data['protocol']) if not filedata: log.error('No %s data found', data['protocol']) else: filename = data['name'] + '.' 
+ data['protocol'] response = srpc.create_task(filename = filename, filedata = filedata) except: log.error('Exception while adding torrent: %s', traceback.format_exc()) finally: return self.downloadReturnId('') if response else False def test(self): """ Check if connection works :return: bool """ host = cleanHost(self.conf('host'), protocol = False).split(':') try: srpc = SynologyRPC(host[0], host[1], self.conf('username'), self.conf('password')) test_result = srpc.test() except: return False return test_result def getEnabledProtocol(self): if self.conf('use_for') == 'both': return super(Synology, self).getEnabledProtocol() elif self.conf('use_for') == 'torrent': return ['torrent', 'torrent_magnet'] else: return ['nzb'] def isEnabled(self, manual = False, data = None): if not data: data = {} for_protocol = ['both'] if data and 'torrent' in data.get('protocol'): for_protocol.append('torrent') elif data: for_protocol.append(data.get('protocol')) return super(Synology, self).isEnabled(manual, data) and\ ((self.conf('use_for') in for_protocol)) class SynologyRPC(object): """SynologyRPC lite library""" def __init__(self, host = 'localhost', port = 5000, username = None, password = None, destination = None): super(SynologyRPC, self).__init__() self.download_url = 'http://%s:%s/webapi/DownloadStation/task.cgi' % (host, port) self.auth_url = 'http://%s:%s/webapi/auth.cgi' % (host, port) self.sid = None self.username = username self.password = password self.destination = destination self.session_name = 'DownloadStation' def _login(self): if self.username and self.password: args = {'api': 'SYNO.API.Auth', 'account': self.username, 'passwd': self.password, 'version': 2, 'method': 'login', 'session': self.session_name, 'format': 'sid'} response = self._req(self.auth_url, args) if response['success']: self.sid = response['data']['sid'] log.debug('sid=%s', self.sid) else: log.error('Couldn\'t log into Synology, %s', response) return response['success'] else: log.error('User or password missing, not using authentication.') return False def _logout(self): args = {'api':'SYNO.API.Auth', 'version':1, 'method':'logout', 'session':self.session_name, '_sid':self.sid} return self._req(self.auth_url, args) def _req(self, url, args, files = None): response = {'success': False} try: req = requests.post(url, data = args, files = files, verify = False) req.raise_for_status() response = json.loads(req.text) if response['success']: log.info('Synology action successfull') return response except requests.ConnectionError as err: log.error('Synology connection error, check your config %s', err) except requests.HTTPError as err: log.error('SynologyRPC HTTPError: %s', err) except Exception as err: log.error('Exception: %s', err) finally: return response def create_task(self, url = None, filename = None, filedata = None): """ Creates new download task in Synology DownloadStation. Either specify url or pair (filename, filedata). 
Returns True if task was created, False otherwise """ result = False # login if self._login(): args = {'api': 'SYNO.DownloadStation.Task', 'version': '1', 'method': 'create', '_sid': self.sid} if self.destination and len(self.destination) > 0: args['destination'] = self.destination if url: log.info('Login success, adding torrent URI') args['uri'] = url response = self._req(self.download_url, args = args) if response['success']: log.info('Response: %s', response) else: log.error('Response: %s', response) synoerrortype = { 400 : 'File upload failed', 401 : 'Max number of tasks reached', 402 : 'Destination denied', 403 : 'Destination does not exist', 404 : 'Invalid task id', 405 : 'Invalid task action', 406 : 'No default destination', 407 : 'Set destination failed', 408 : 'File does not exist' } log.error('DownloadStation returned the following error : %s', synoerrortype[response['error']['code']]) result = response['success'] elif filename and filedata: log.info('Login success, adding torrent') files = {'file': (filename, filedata)} response = self._req(self.download_url, args = args, files = files) log.info('Response: %s', response) result = response['success'] else: log.error('Invalid use of SynologyRPC.create_task: either url or filename+filedata must be specified') self._logout() return result def test(self): return bool(self._login()) config = [{ 'name': 'synology', 'groups': [ { 'tab': 'downloaders', 'list': 'download_providers', 'name': 'synology', 'label': 'Synology', 'description': 'Use <a href="https://www.synology.com/en-us/dsm/app_packages/DownloadStation" target="_blank">Synology Download Station</a> to download.', 'wizard': True, 'options': [ { 'name': 'enabled', 'default': 0, 'type': 'enabler', 'radio_group': 'nzb,torrent', }, { 'name': 'host', 'default': 'localhost:5000', 'description': 'Hostname with port. Usually <strong>localhost:5000</strong>', }, { 'name': 'username', }, { 'name': 'password', 'type': 'password', }, { 'name': 'destination', 'description': 'Specify <strong>existing</strong> destination share to where your files will be downloaded, usually <strong>Downloads</strong>', 'advanced': True, }, { 'name': 'use_for', 'label': 'Use for', 'default': 'both', 'type': 'dropdown', 'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrent', 'torrent')], }, { 'name': 'manual', 'default': 0, 'type': 'bool', 'advanced': True, 'description': 'Disable this downloader for automated searches, but use it when I manually send a release.', }, ], } ], }]<|fim▁end|>
return False try: # Send request to Synology
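For reference, a short usage sketch of the SynologyRPC helper class defined in the synology.py row above. It is illustrative only: the host, credentials, magnet hash and file name are made-up placeholders, while the constructor arguments and the test()/create_task() calls are exactly the ones the row defines.

# Placeholder DSM host and credentials -- not values from the dataset row.
rpc = SynologyRPC(host='192.168.1.10', port=5000,
                  username='admin', password='secret', destination='Downloads')

if rpc.test():  # performs a login to verify the credentials
    # Hand DownloadStation a URL/magnet...
    rpc.create_task(url='magnet:?xt=urn:btih:0000000000000000000000000000000000000000')
    # ...or upload a torrent/nzb payload directly.
    with open('release.torrent', 'rb') as fh:
        rpc.create_task(filename='release.torrent', filedata=fh.read())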
<|file_name|>auth-guard.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core'; import { ActivatedRouteSnapshot, RouterStateSnapshot, Router, Route, CanActivate, CanActivateChild, CanLoad } from '@angular/router'; import { AuthService } from './auth.service'; @Injectable() export  class AuthGuard implements CanActivate, CanActivateChild, CanLoad { constructor(private authService: AuthService, private router: Router) { } canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): boolean { console.log('In canActivate: ' + state.url); return this.checkLoggedIn(state.url); } canActivateChild(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): boolean { console.log('In canActivateChild: ' + state.url); return this.checkLoggedIn(state.url); } canLoad(route: Route): boolean {<|fim▁hole|> checkLoggedIn(url: string): boolean { if (this.authService.isLoggedIn()) { return true; } // Retain the attempted URL for redirection this.authService.redirectUrl = url; this.router.navigate(['/login']); return false; } }<|fim▁end|>
console.log('In canLoad: ' + route.path); return this.checkLoggedIn(route.path); }
<|file_name|>test_contents.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals, absolute_import import six from sys import platform import locale import os.path from pelican.tests.support import unittest, get_settings from pelican.contents import Page, Article, Static, URLWrapper, Author, Category from pelican.settings import DEFAULT_CONFIG from pelican.utils import path_to_url, truncate_html_words, SafeDatetime, posix_join from pelican.signals import content_object_init from jinja2.utils import generate_lorem_ipsum # generate one paragraph, enclosed with <p> TEST_CONTENT = str(generate_lorem_ipsum(n=1)) TEST_SUMMARY = generate_lorem_ipsum(n=1, html=False) class TestPage(unittest.TestCase): def setUp(self): super(TestPage, self).setUp() self.old_locale = locale.setlocale(locale.LC_ALL) locale.setlocale(locale.LC_ALL, str('C')) self.page_kwargs = { 'content': TEST_CONTENT, 'context': { 'localsiteurl': '', }, 'metadata': { 'summary': TEST_SUMMARY, 'title': 'foo bar', 'author': Author('Blogger', DEFAULT_CONFIG), }, 'source_path': '/path/to/file/foo.ext' } def tearDown(self): locale.setlocale(locale.LC_ALL, self.old_locale) def test_use_args(self): # Creating a page with arguments passed to the constructor should use # them to initialise object's attributes. metadata = {'foo': 'bar', 'foobar': 'baz', 'title': 'foobar', } page = Page(TEST_CONTENT, metadata=metadata, context={'localsiteurl': ''}) for key, value in metadata.items(): self.assertTrue(hasattr(page, key)) self.assertEqual(value, getattr(page, key)) self.assertEqual(page.content, TEST_CONTENT) def test_mandatory_properties(self): # If the title is not set, must throw an exception. page = Page('content') with self.assertRaises(NameError): page.check_properties() page = Page('content', metadata={'title': 'foobar'}) page.check_properties() def test_summary_from_metadata(self): # If a :summary: metadata is given, it should be used page = Page(**self.page_kwargs) self.assertEqual(page.summary, TEST_SUMMARY) def test_summary_max_length(self): # If a :SUMMARY_MAX_LENGTH: is set, and there is no other summary, # generated summary should not exceed the given length. page_kwargs = self._copy_page_kwargs() settings = get_settings() page_kwargs['settings'] = settings del page_kwargs['metadata']['summary'] settings['SUMMARY_MAX_LENGTH'] = None page = Page(**page_kwargs) self.assertEqual(page.summary, TEST_CONTENT) settings['SUMMARY_MAX_LENGTH'] = 10 page = Page(**page_kwargs) self.assertEqual(page.summary, truncate_html_words(TEST_CONTENT, 10)) settings['SUMMARY_MAX_LENGTH'] = 0 page = Page(**page_kwargs) self.assertEqual(page.summary, '') def test_slug(self): page_kwargs = self._copy_page_kwargs() settings = get_settings() page_kwargs['settings'] = settings settings['SLUGIFY_SOURCE'] = "title" page = Page(**page_kwargs) self.assertEqual(page.slug, 'foo-bar') settings['SLUGIFY_SOURCE'] = "basename" page = Page(**page_kwargs) self.assertEqual(page.slug, 'foo') def test_defaultlang(self): # If no lang is given, default to the default one. page = Page(**self.page_kwargs) self.assertEqual(page.lang, DEFAULT_CONFIG['DEFAULT_LANG']) # it is possible to specify the lang in the metadata infos self.page_kwargs['metadata'].update({'lang': 'fr', }) page = Page(**self.page_kwargs) self.assertEqual(page.lang, 'fr') def test_save_as(self): # If a lang is not the default lang, save_as should be set # accordingly. 
# if a title is defined, save_as should be set page = Page(**self.page_kwargs) self.assertEqual(page.save_as, "pages/foo-bar.html") # if a language is defined, save_as should include it accordingly self.page_kwargs['metadata'].update({'lang': 'fr', }) page = Page(**self.page_kwargs) self.assertEqual(page.save_as, "pages/foo-bar-fr.html") def test_metadata_url_format(self): # Arbitrary metadata should be passed through url_format() page = Page(**self.page_kwargs) self.assertIn('summary', page.url_format.keys()) page.metadata['directory'] = 'test-dir' page.settings = get_settings(PAGE_SAVE_AS='{directory}/{slug}') self.assertEqual(page.save_as, 'test-dir/foo-bar') def test_datetime(self): # If DATETIME is set to a tuple, it should be used to override LOCALE dt = SafeDatetime(2015, 9, 13) page_kwargs = self._copy_page_kwargs() # set its date to dt page_kwargs['metadata']['date'] = dt page = Page(**page_kwargs) # page.locale_date is a unicode string in both python2 and python3 dt_date = dt.strftime(DEFAULT_CONFIG['DEFAULT_DATE_FORMAT']) # dt_date is a byte string in python2, and a unicode string in python3 # Let's make sure it is a unicode string (relies on python 3.3 supporting the u prefix) if type(dt_date) != type(u''): # python2: dt_date = unicode(dt_date, 'utf8') self.assertEqual(page.locale_date, dt_date ) page_kwargs['settings'] = get_settings() # I doubt this can work on all platforms ... if platform == "win32": locale = 'jpn' else: locale = 'ja_JP.utf8' page_kwargs['settings']['DATE_FORMATS'] = {'jp': (locale, '%Y-%m-%d(%a)')} page_kwargs['metadata']['lang'] = 'jp' import locale as locale_module try: page = Page(**page_kwargs) self.assertEqual(page.locale_date, '2015-09-13(\u65e5)') except locale_module.Error: # The constructor of ``Page`` will try to set the locale to # ``ja_JP.utf8``. But this attempt will failed when there is no # such locale in the system. You can see which locales there are # in your system with ``locale -a`` command. # # Until we find some other method to test this functionality, we # will simply skip this test. unittest.skip("There is no locale %s in this system." % locale) def test_template(self): # Pages default to page, metadata overwrites default_page = Page(**self.page_kwargs) self.assertEqual('page', default_page.template) page_kwargs = self._copy_page_kwargs() page_kwargs['metadata']['template'] = 'custom' custom_page = Page(**page_kwargs) self.assertEqual('custom', custom_page.template) def _copy_page_kwargs(self): # make a deep copy of page_kwargs page_kwargs = dict([(key, self.page_kwargs[key]) for key in self.page_kwargs]) for key in page_kwargs: if not isinstance(page_kwargs[key], dict): break page_kwargs[key] = dict([(subkey, page_kwargs[key][subkey]) for subkey in page_kwargs[key]]) return page_kwargs def test_signal(self): # If a title is given, it should be used to generate the slug. def receiver_test_function(sender, instance): pass content_object_init.connect(receiver_test_function, sender=Page) Page(**self.page_kwargs) self.assertTrue(content_object_init.has_receivers_for(Page)) def test_get_content(self): # Test that the content is updated with the relative links to # filenames, tags and categories. 
settings = get_settings() args = self.page_kwargs.copy() args['settings'] = settings # Tag args['content'] = ('A simple test, with a ' '<a href="|tag|tagname">link</a>') page = Page(**args) content = page.get_content('http://notmyidea.org') self.assertEqual( content, ('A simple test, with a ' '<a href="http://notmyidea.org/tag/tagname.html">link</a>')) # Category args['content'] = ('A simple test, with a ' '<a href="|category|category">link</a>') page = Page(**args) content = page.get_content('http://notmyidea.org') self.assertEqual( content, ('A simple test, with a ' '<a href="http://notmyidea.org/category/category.html">link</a>')) def test_intrasite_link(self): # type does not take unicode in PY2 and bytes in PY3, which in # combination with unicode literals leads to following insane line: cls_name = '_DummyArticle' if six.PY3 else b'_DummyArticle' article = type(cls_name, (object,), {'url': 'article.html'}) args = self.page_kwargs.copy() args['settings'] = get_settings() args['source_path'] = 'content' args['context']['filenames'] = {'article.rst': article} # Classic intrasite link via filename args['content'] = ( 'A simple test, with a ' '<a href="|filename|article.rst">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article.html">link</a>' ) # fragment args['content'] = ( 'A simple test, with a ' '<a href="|filename|article.rst#section-2">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article.html#section-2">link</a>' ) # query args['content'] = ( 'A simple test, with a ' '<a href="|filename|article.rst' '?utm_whatever=234&highlight=word">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article.html' '?utm_whatever=234&highlight=word">link</a>' ) # combination args['content'] = ( 'A simple test, with a ' '<a href="|filename|article.rst' '?utm_whatever=234&highlight=word#section-2">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article.html' '?utm_whatever=234&highlight=word#section-2">link</a>' ) def test_intrasite_link_more(self): # type does not take unicode in PY2 and bytes in PY3, which in # combination with unicode literals leads to following insane line: cls_name = '_DummyAsset' if six.PY3 else b'_DummyAsset' args = self.page_kwargs.copy() args['settings'] = get_settings() args['source_path'] = 'content' args['context']['filenames'] = { 'images/poster.jpg': type(cls_name, (object,), {'url': 'images/poster.jpg'}), 'assets/video.mp4': type(cls_name, (object,), {'url': 'assets/video.mp4'}), 'images/graph.svg': type(cls_name, (object,), {'url': 'images/graph.svg'}), 'reference.rst': type(cls_name, (object,), {'url': 'reference.html'}), } # video.poster args['content'] = ( 'There is a video with poster ' '<video controls poster="{filename}/images/poster.jpg">' '<source src="|filename|/assets/video.mp4" type="video/mp4">' '</video>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'There is a video with poster ' '<video controls poster="http://notmyidea.org/images/poster.jpg">' '<source src="http://notmyidea.org/assets/video.mp4" type="video/mp4">' '</video>' ) # object.data args['content'] = ( 'There is a svg object ' '<object 
data="{filename}/images/graph.svg" type="image/svg+xml"></object>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'There is a svg object ' '<object data="http://notmyidea.org/images/graph.svg" type="image/svg+xml"></object>' ) # blockquote.cite args['content'] = ( 'There is a blockquote with cite attribute ' '<blockquote cite="{filename}reference.rst">blah blah</blockquote>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'There is a blockquote with cite attribute ' '<blockquote cite="http://notmyidea.org/reference.html">blah blah</blockquote>' ) def test_intrasite_link_markdown_spaces(self): # Markdown introduces %20 instead of spaces, this tests that # we support markdown doing this. cls_name = '_DummyArticle' if six.PY3 else b'_DummyArticle' article = type(cls_name, (object,), {'url': 'article-spaces.html'}) args = self.page_kwargs.copy() args['settings'] = get_settings() args['source_path'] = 'content' args['context']['filenames'] = {'article spaces.rst': article} # An intrasite link via filename with %20 as a space args['content'] = ( 'A simple test, with a ' '<a href="|filename|article%20spaces.rst">link</a>' ) content = Page(**args).get_content('http://notmyidea.org') self.assertEqual( content, 'A simple test, with a ' '<a href="http://notmyidea.org/article-spaces.html">link</a>' ) def test_multiple_authors(self): """Test article with multiple authors.""" args = self.page_kwargs.copy() content = Page(**args) assert content.authors == [content.author] args['metadata'].pop('author') args['metadata']['authors'] = [Author('First Author', DEFAULT_CONFIG), Author('Second Author', DEFAULT_CONFIG)] content = Page(**args) assert content.authors assert content.author == content.authors[0] class TestArticle(TestPage): def test_template(self): # Articles default to article, metadata overwrites default_article = Article(**self.page_kwargs) self.assertEqual('article', default_article.template) article_kwargs = self._copy_page_kwargs() article_kwargs['metadata']['template'] = 'custom' custom_article = Article(**article_kwargs) self.assertEqual('custom', custom_article.template) def test_slugify_category_author(self): settings = get_settings() settings['SLUG_SUBSTITUTIONS'] = [ ('C#', 'csharp') ] settings['ARTICLE_URL'] = '{author}/{category}/{slug}/' settings['ARTICLE_SAVE_AS'] = '{author}/{category}/{slug}/index.html' article_kwargs = self._copy_page_kwargs() article_kwargs['metadata']['author'] = Author("O'Brien", settings) article_kwargs['metadata']['category'] = Category('C# & stuff', settings) article_kwargs['metadata']['title'] = 'fnord' article_kwargs['settings'] = settings article = Article(**article_kwargs) self.assertEqual(article.url, 'obrien/csharp-stuff/fnord/') self.assertEqual(article.save_as, 'obrien/csharp-stuff/fnord/index.html') class TestStatic(unittest.TestCase): def setUp(self): self.settings = get_settings( STATIC_SAVE_AS='{path}', STATIC_URL='{path}', PAGE_SAVE_AS=os.path.join('outpages', '{slug}.html'), PAGE_URL='outpages/{slug}.html') self.context = self.settings.copy() self.static = Static(content=None, metadata={}, settings=self.settings, source_path=posix_join('dir', 'foo.jpg'), context=self.context) self.context['filenames'] = {self.static.source_path: self.static} def tearDown(self): pass def test_attach_to_same_dir(self): """attach_to() overrides a static file's save_as and url. 
""" page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) self.static.attach_to(page) expected_save_as = os.path.join('outpages', 'foo.jpg') self.assertEqual(self.static.save_as, expected_save_as) self.assertEqual(self.static.url, path_to_url(expected_save_as)) def test_attach_to_parent_dir(self): """attach_to() preserves dirs inside the linking document dir. """ page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path='fakepage.md') self.static.attach_to(page) expected_save_as = os.path.join('outpages', 'dir', 'foo.jpg') self.assertEqual(self.static.save_as, expected_save_as) self.assertEqual(self.static.url, path_to_url(expected_save_as)) def test_attach_to_other_dir(self): """attach_to() ignores dirs outside the linking document dir. """ page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'otherdir', 'fakepage.md')) self.static.attach_to(page) expected_save_as = os.path.join('outpages', 'foo.jpg') self.assertEqual(self.static.save_as, expected_save_as) self.assertEqual(self.static.url, path_to_url(expected_save_as)) def test_attach_to_ignores_subsequent_calls(self): """attach_to() does nothing when called a second time. """ page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) self.static.attach_to(page) otherdir_settings = self.settings.copy() otherdir_settings.update(dict( PAGE_SAVE_AS=os.path.join('otherpages', '{slug}.html'), PAGE_URL='otherpages/{slug}.html')) otherdir_page = Page(content="other page", metadata={'title': 'otherpage'}, settings=otherdir_settings, source_path=os.path.join('dir', 'otherpage.md')) self.static.attach_to(otherdir_page) otherdir_save_as = os.path.join('otherpages', 'foo.jpg') self.assertNotEqual(self.static.save_as, otherdir_save_as) self.assertNotEqual(self.static.url, path_to_url(otherdir_save_as)) def test_attach_to_does_nothing_after_save_as_referenced(self): """attach_to() does nothing if the save_as was already referenced. (For example, by a {filename} link an a document processed earlier.) """ original_save_as = self.static.save_as page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) self.static.attach_to(page) self.assertEqual(self.static.save_as, original_save_as) self.assertEqual(self.static.url, path_to_url(original_save_as)) def test_attach_to_does_nothing_after_url_referenced(self): """attach_to() does nothing if the url was already referenced. (For example, by a {filename} link an a document processed earlier.) """ original_url = self.static.url page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) self.static.attach_to(page) self.assertEqual(self.static.save_as, self.static.source_path) self.assertEqual(self.static.url, original_url) def test_attach_to_does_not_override_an_override(self): """attach_to() does not override paths that were overridden elsewhere. 
(For example, by the user with EXTRA_PATH_METADATA) """ customstatic = Static(content=None, metadata=dict(save_as='customfoo.jpg', url='customfoo.jpg'), settings=self.settings, source_path=os.path.join('dir', 'foo.jpg'), context=self.settings.copy()) page = Page(content="fake page", metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'fakepage.md')) customstatic.attach_to(page) self.assertEqual(customstatic.save_as, 'customfoo.jpg') self.assertEqual(customstatic.url, 'customfoo.jpg') def test_attach_link_syntax(self): """{attach} link syntax triggers output path override & url replacement. """ html = '<a href="{attach}../foo.jpg">link</a>' page = Page(content=html, metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'otherdir', 'fakepage.md'), context=self.context) content = page.get_content('') self.assertNotEqual(content, html, "{attach} link syntax did not trigger URL replacement.") expected_save_as = os.path.join('outpages', 'foo.jpg') self.assertEqual(self.static.save_as, expected_save_as) self.assertEqual(self.static.url, path_to_url(expected_save_as)) def test_tag_link_syntax(self): "{tag} link syntax triggers url replacement." html = '<a href="{tag}foo">link</a>' page = Page( content=html, metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'otherdir', 'fakepage.md'), context=self.context) content = page.get_content('') self.assertNotEqual(content, html) def test_category_link_syntax(self): "{category} link syntax triggers url replacement." html = '<a href="{category}foo">link</a>' page = Page(content=html, metadata={'title': 'fakepage'}, settings=self.settings, source_path=os.path.join('dir', 'otherdir', 'fakepage.md'), context=self.context) content = page.get_content('') self.assertNotEqual(content, html) class TestURLWrapper(unittest.TestCase): def test_comparisons(self): # URLWrappers are sorted by name wrapper_a = URLWrapper(name='first', settings={}) wrapper_b = URLWrapper(name='last', settings={}) self.assertFalse(wrapper_a > wrapper_b)<|fim▁hole|> self.assertTrue(wrapper_a != wrapper_b) self.assertTrue(wrapper_a <= wrapper_b) self.assertTrue(wrapper_a < wrapper_b) wrapper_b.name = 'first' self.assertFalse(wrapper_a > wrapper_b) self.assertTrue(wrapper_a >= wrapper_b) self.assertTrue(wrapper_a == wrapper_b) self.assertFalse(wrapper_a != wrapper_b) self.assertTrue(wrapper_a <= wrapper_b) self.assertFalse(wrapper_a < wrapper_b) wrapper_a.name = 'last' self.assertTrue(wrapper_a > wrapper_b) self.assertTrue(wrapper_a >= wrapper_b) self.assertFalse(wrapper_a == wrapper_b) self.assertTrue(wrapper_a != wrapper_b) self.assertFalse(wrapper_a <= wrapper_b) self.assertFalse(wrapper_a < wrapper_b)<|fim▁end|>
self.assertFalse(wrapper_a >= wrapper_b) self.assertFalse(wrapper_a == wrapper_b)
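A minimal illustration of the name-based ordering that TestURLWrapper exercises above. The URLWrapper(name=..., settings={}) constructor mirrors the tests; the list contents are invented for the example, and it assumes the comparisons asserted in the tests are provided as ordinary rich-comparison methods so that list.sort() can use them.

wrappers = [URLWrapper(name='zeta', settings={}),
            URLWrapper(name='alpha', settings={})]
wrappers.sort()  # falls back on the same comparisons the tests assert
assert [w.name for w in wrappers] == ['alpha', 'zeta']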
<|file_name|>visavis-chess.js<|end_file_name|><|fim▁begin|>Dagaz.Controller.persistense = "setup"; Dagaz.Model.WIDTH = 8; Dagaz.Model.HEIGHT = 8; ZRF = { JUMP: 0, IF: 1, FORK: 2, FUNCTION: 3, IN_ZONE: 4, FLAG: 5, SET_FLAG: 6, POS_FLAG: 7, SET_POS_FLAG: 8, ATTR: 9, SET_ATTR: 10, PROMOTE: 11, MODE: 12, ON_BOARD_DIR: 13, ON_BOARD_POS: 14, PARAM: 15, LITERAL: 16, VERIFY: 20 }; Dagaz.Model.BuildDesign = function(design) { design.checkVersion("z2j", "2"); design.checkVersion("animate-captures", "false"); design.checkVersion("smart-moves", "false"); design.checkVersion("show-blink", "false"); design.checkVersion("show-hints", "false"); design.addDirection("w"); // 0 design.addDirection("e"); // 1 design.addDirection("s"); // 2 design.addDirection("ne"); // 3 design.addDirection("n"); // 4 design.addDirection("se"); // 5 design.addDirection("sw"); // 6 design.addDirection("nw"); // 7 design.addPlayer("White", [1, 0, 4, 6, 2, 7, 3, 5]); design.addPlayer("Black", [0, 1, 4, 5, 2, 3, 7, 6]); design.addPosition("a8", [0, 1, 8, 0, 0, 9, 0, 0]); design.addPosition("b8", [-1, 1, 8, 0, 0, 9, 7, 0]); design.addPosition("c8", [-1, 1, 8, 0, 0, 9, 7, 0]); design.addPosition("d8", [-1, 1, 8, 0, 0, 9, 7, 0]); design.addPosition("e8", [-1, 1, 8, 0, 0, 9, 7, 0]); design.addPosition("f8", [-1, 1, 8, 0, 0, 9, 7, 0]); design.addPosition("g8", [-1, 1, 8, 0, 0, 9, 7, 0]); design.addPosition("h8", [-1, 0, 8, 0, 0, 0, 7, 0]); design.addPosition("a7", [0, 1, 8, -7, -8, 9, 0, 0]); design.addPosition("b7", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("c7", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("d7", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("e7", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("f7", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("g7", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("h7", [-1, 0, 8, 0, -8, 0, 7, -9]); design.addPosition("a6", [0, 1, 8, -7, -8, 9, 0, 0]); design.addPosition("b6", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("c6", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("d6", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("e6", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("f6", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("g6", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("h6", [-1, 0, 8, 0, -8, 0, 7, -9]); design.addPosition("a5", [0, 1, 8, -7, -8, 9, 0, 0]); design.addPosition("b5", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("c5", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("d5", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("e5", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("f5", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("g5", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("h5", [-1, 0, 8, 0, -8, 0, 7, -9]); design.addPosition("a4", [0, 1, 8, -7, -8, 9, 0, 0]); design.addPosition("b4", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("c4", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("d4", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("e4", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("f4", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("g4", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("h4", [-1, 0, 8, 0, -8, 0, 7, -9]); design.addPosition("a3", [0, 1, 8, -7, -8, 9, 0, 0]); design.addPosition("b3", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("c3", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("d3", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("e3", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("f3", [-1, 1, 8, -7, -8, 9, 7, -9]); 
design.addPosition("g3", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("h3", [-1, 0, 8, 0, -8, 0, 7, -9]); design.addPosition("a2", [0, 1, 8, -7, -8, 9, 0, 0]); design.addPosition("b2", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("c2", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("d2", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("e2", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("f2", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("g2", [-1, 1, 8, -7, -8, 9, 7, -9]); design.addPosition("h2", [-1, 0, 8, 0, -8, 0, 7, -9]); design.addPosition("a1", [0, 1, 0, -7, -8, 0, 0, 0]); design.addPosition("b1", [-1, 1, 0, -7, -8, 0, 0, -9]); design.addPosition("c1", [-1, 1, 0, -7, -8, 0, 0, -9]); design.addPosition("d1", [-1, 1, 0, -7, -8, 0, 0, -9]); design.addPosition("e1", [-1, 1, 0, -7, -8, 0, 0, -9]); design.addPosition("f1", [-1, 1, 0, -7, -8, 0, 0, -9]); design.addPosition("g1", [-1, 1, 0, -7, -8, 0, 0, -9]); design.addPosition("h1", [-1, 0, 0, 0, -8, 0, 0, -9]); design.addPosition("X1", [0, 0, 0, 0, 0, 0, 0, 0]); design.addPosition("X2", [0, 0, 0, 0, 0, 0, 0, 0]); design.addPosition("X3", [0, 0, 0, 0, 0, 0, 0, 0]); design.addPosition("X4", [0, 0, 0, 0, 0, 0, 0, 0]); design.addZone("last-rank", 1, [0, 1, 2, 3, 4, 5, 6, 7]); design.addZone("last-rank", 2, [56, 57, 58, 59, 60, 61, 62, 63]); design.addZone("third-rank", 1, [40, 41, 42, 43, 44, 45, 46, 47]); design.addZone("third-rank", 2, [16, 17, 18, 19, 20, 21, 22, 23]); design.addZone("black", 1, [56, 58, 60, 62, 49, 51, 53, 55, 40, 42, 44, 46, 33, 35, 37, 39, 24, 26, 28, 30, 17, 19, 21, 23, 8, 10, 12, 14, 1, 3, 5, 7]); design.addZone("black", 2, [56, 58, 60, 62, 49, 51, 53, 55, 40, 42, 44, 46, 33, 35, 37, 39, 24, 26, 28, 30, 17, 19, 21, 23, 8, 10, 12, 14, 1, 3, 5, 7]); design.addZone("home", 1, [56, 57, 58, 59, 60, 61, 62, 63, 48, 49, 50, 51, 52, 53, 54, 55]); design.addZone("home", 2, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); design.addCommand(0, ZRF.FUNCTION, 24); // from design.addCommand(0, ZRF.PARAM, 0); // $1 design.addCommand(0, ZRF.FUNCTION, 22); // navigate design.addCommand(0, ZRF.FUNCTION, 1); // empty? design.addCommand(0, ZRF.FUNCTION, 20); // verify design.addCommand(0, ZRF.IN_ZONE, 0); // last-rank design.addCommand(0, ZRF.FUNCTION, 0); // not design.addCommand(0, ZRF.IF, 4); design.addCommand(0, ZRF.PROMOTE, 4); // Queen design.addCommand(0, ZRF.FUNCTION, 25); // to design.addCommand(0, ZRF.JUMP, 2); design.addCommand(0, ZRF.FUNCTION, 25); // to design.addCommand(0, ZRF.FUNCTION, 28); // end design.addCommand(1, ZRF.FUNCTION, 24); // from design.addCommand(1, ZRF.PARAM, 0); // $1 design.addCommand(1, ZRF.FUNCTION, 22); // navigate design.addCommand(1, ZRF.FUNCTION, 1); // empty? design.addCommand(1, ZRF.FUNCTION, 20); // verify design.addCommand(1, ZRF.IN_ZONE, 1); // third-rank design.addCommand(1, ZRF.FUNCTION, 20); // verify design.addCommand(1, ZRF.PARAM, 1); // $2 design.addCommand(1, ZRF.FUNCTION, 22); // navigate design.addCommand(1, ZRF.FUNCTION, 1); // empty? design.addCommand(1, ZRF.FUNCTION, 20); // verify design.addCommand(1, ZRF.FUNCTION, 25); // to design.addCommand(1, ZRF.FUNCTION, 28); // end design.addCommand(2, ZRF.FUNCTION, 24); // from design.addCommand(2, ZRF.PARAM, 0); // $1 design.addCommand(2, ZRF.FUNCTION, 22); // navigate design.addCommand(2, ZRF.FUNCTION, 2); // enemy? 
design.addCommand(2, ZRF.FUNCTION, 20); // verify design.addCommand(2, ZRF.IN_ZONE, 0); // last-rank design.addCommand(2, ZRF.FUNCTION, 0); // not design.addCommand(2, ZRF.IF, 4); design.addCommand(2, ZRF.PROMOTE, 4); // Queen design.addCommand(2, ZRF.FUNCTION, 25); // to design.addCommand(2, ZRF.JUMP, 2); design.addCommand(2, ZRF.FUNCTION, 25); // to design.addCommand(2, ZRF.FUNCTION, 28); // end design.addCommand(3, ZRF.FUNCTION, 24); // from design.addCommand(3, ZRF.PARAM, 0); // $1 design.addCommand(3, ZRF.FUNCTION, 22); // navigate design.addCommand(3, ZRF.FUNCTION, 2); // enemy? design.addCommand(3, ZRF.FUNCTION, 20); // verify design.addCommand(3, ZRF.FUNCTION, 5); // last-to? design.addCommand(3, ZRF.FUNCTION, 20); // verify design.addCommand(3, ZRF.LITERAL, 0); // Pawn design.addCommand(3, ZRF.FUNCTION, 10); // piece? design.addCommand(3, ZRF.FUNCTION, 20); // verify design.addCommand(3, ZRF.FUNCTION, 26); // capture design.addCommand(3, ZRF.PARAM, 1); // $2 design.addCommand(3, ZRF.FUNCTION, 22); // navigate design.addCommand(3, ZRF.FUNCTION, 6); // mark design.addCommand(3, ZRF.PARAM, 2); // $3 design.addCommand(3, ZRF.FUNCTION, 22); // navigate design.addCommand(3, ZRF.FUNCTION, 4); // last-from? design.addCommand(3, ZRF.FUNCTION, 20); // verify design.addCommand(3, ZRF.FUNCTION, 7); // back design.addCommand(3, ZRF.FUNCTION, 25); // to design.addCommand(3, ZRF.FUNCTION, 28); // end design.addCommand(4, ZRF.FUNCTION, 24); // from design.addCommand(4, ZRF.PARAM, 0); // $1 design.addCommand(4, ZRF.FUNCTION, 22); // navigate design.addCommand(4, ZRF.FUNCTION, 1); // empty? design.addCommand(4, ZRF.FUNCTION, 0); // not design.addCommand(4, ZRF.IF, 7); design.addCommand(4, ZRF.FORK, 3); design.addCommand(4, ZRF.FUNCTION, 25); // to design.addCommand(4, ZRF.FUNCTION, 28); // end design.addCommand(4, ZRF.PARAM, 1); // $2 design.addCommand(4, ZRF.FUNCTION, 22); // navigate design.addCommand(4, ZRF.JUMP, -8); design.addCommand(4, ZRF.FUNCTION, 3); // friend? design.addCommand(4, ZRF.FUNCTION, 0); // not design.addCommand(4, ZRF.FUNCTION, 20); // verify design.addCommand(4, ZRF.FUNCTION, 25); // to design.addCommand(4, ZRF.FUNCTION, 28); // end design.addCommand(5, ZRF.FUNCTION, 24); // from design.addCommand(5, ZRF.PARAM, 0); // $1 design.addCommand(5, ZRF.FUNCTION, 22); // navigate design.addCommand(5, ZRF.PARAM, 1); // $2 design.addCommand(5, ZRF.FUNCTION, 22); // navigate design.addCommand(5, ZRF.FUNCTION, 3); // friend? design.addCommand(5, ZRF.FUNCTION, 0); // not design.addCommand(5, ZRF.FUNCTION, 20); // verify design.addCommand(5, ZRF.FUNCTION, 25); // to design.addCommand(5, ZRF.FUNCTION, 28); // end design.addCommand(6, ZRF.FUNCTION, 24); // from design.addCommand(6, ZRF.PARAM, 0); // $1 design.addCommand(6, ZRF.FUNCTION, 22); // navigate design.addCommand(6, ZRF.FUNCTION, 3); // friend? design.addCommand(6, ZRF.FUNCTION, 0); // not design.addCommand(6, ZRF.FUNCTION, 20); // verify design.addCommand(6, ZRF.FUNCTION, 25); // to design.addCommand(6, ZRF.FUNCTION, 28); // end design.addCommand(7, ZRF.IN_ZONE, 3); // home design.addCommand(7, ZRF.FUNCTION, 20); // verify design.addCommand(7, ZRF.FUNCTION, 1); // empty? 
design.addCommand(7, ZRF.FUNCTION, 20); // verify design.addCommand(7, ZRF.FUNCTION, 25); // to design.addCommand(7, ZRF.FUNCTION, 28); // end design.addPriority(0); // drop-type design.addPriority(1); // normal-type design.addPiece("Pawn", 0, 800); design.addMove(0, 0, [4], 1); design.addMove(0, 1, [4, 4], 1); design.addMove(0, 2, [7], 1); design.addMove(0, 2, [3], 1); design.addMove(0, 3, [1, 4, 4], 1); design.addMove(0, 3, [0, 4, 4], 1); design.addPiece("Rook", 1, 5000); design.addMove(1, 4, [4, 4], 1); design.addMove(1, 4, [2, 2], 1); design.addMove(1, 4, [0, 0], 1); design.addMove(1, 4, [1, 1], 1); design.addDrop(1, 7, [], 0); design.addPiece("Knight", 2, 3350); design.addMove(2, 5, [4, 7], 1); design.addMove(2, 5, [4, 3], 1); design.addMove(2, 5, [2, 6], 1); design.addMove(2, 5, [2, 5], 1); design.addMove(2, 5, [0, 7], 1); design.addMove(2, 5, [0, 6], 1); design.addMove(2, 5, [1, 3], 1); design.addMove(2, 5, [1, 5], 1); design.addDrop(2, 7, [], 0); design.addPiece("Bishop", 3, 3450); design.addMove(3, 4, [7, 7], 1); design.addMove(3, 4, [6, 6], 1); design.addMove(3, 4, [3, 3], 1); design.addMove(3, 4, [5, 5], 1); design.addDrop(3, 7, [], 0); design.addPiece("Queen", 4, 9750); design.addMove(4, 4, [4, 4], 1); design.addMove(4, 4, [2, 2], 1); design.addMove(4, 4, [0, 0], 1); design.addMove(4, 4, [1, 1], 1); design.addMove(4, 4, [7, 7], 1); design.addMove(4, 4, [6, 6], 1); design.addMove(4, 4, [3, 3], 1); design.addMove(4, 4, [5, 5], 1); design.addDrop(4, 7, [], 0); design.addPiece("King", 5, 600000); design.addMove(5, 6, [4], 1); design.addMove(5, 6, [2], 1); design.addMove(5, 6, [0], 1); design.addMove(5, 6, [1], 1); design.addMove(5, 6, [7], 1); design.addMove(5, 6, [6], 1); design.addMove(5, 6, [3], 1); design.addMove(5, 6, [5], 1); design.addDrop(5, 7, [], 0); design.setup("White", "Pawn", 48); design.setup("White", "Pawn", 49); design.setup("White", "Pawn", 50); design.setup("White", "Pawn", 51); design.setup("White", "Pawn", 52); design.setup("White", "Pawn", 53); design.setup("White", "Pawn", 54); design.setup("White", "Pawn", 55); design.reserve("White", "Pawn", 0); design.reserve("White", "Knight", 2); design.reserve("White", "Bishop", 2); design.reserve("White", "Rook", 2); design.reserve("White", "Queen", 1); design.reserve("White", "King", 1); design.setup("Black", "Pawn", 8); design.setup("Black", "Pawn", 9); design.setup("Black", "Pawn", 10); design.setup("Black", "Pawn", 11); design.setup("Black", "Pawn", 12); design.setup("Black", "Pawn", 13); design.setup("Black", "Pawn", 14); design.setup("Black", "Pawn", 15); design.reserve("Black", "Pawn", 0); design.reserve("Black", "Knight", 2); design.reserve("Black", "Bishop", 2); design.reserve("Black", "Rook", 2); design.reserve("Black", "Queen", 1); design.reserve("Black", "King", 1); } Dagaz.View.configure = function(view) { view.defBoard("Board"); view.defPiece("WhitePawn", "White Pawn"); view.defPiece("BlackPawn", "Black Pawn"); view.defPiece("WhiteRook", "White Rook"); view.defPiece("BlackRook", "Black Rook"); view.defPiece("WhiteKnight", "White Knight"); view.defPiece("BlackKnight", "Black Knight"); view.defPiece("WhiteBishop", "White Bishop"); view.defPiece("BlackBishop", "Black Bishop"); view.defPiece("WhiteQueen", "White Queen"); view.defPiece("BlackQueen", "Black Queen"); view.defPiece("WhiteKing", "White King"); view.defPiece("BlackKing", "Black King"); view.defPiece("Ko", "Ko"); view.defPosition("a8", 2, 2, 68, 68); view.defPosition("b8", 70, 2, 68, 68); view.defPosition("c8", 138, 2, 68, 68); 
view.defPosition("d8", 206, 2, 68, 68); view.defPosition("e8", 274, 2, 68, 68); view.defPosition("f8", 342, 2, 68, 68); view.defPosition("g8", 410, 2, 68, 68); view.defPosition("h8", 478, 2, 68, 68); view.defPosition("a7", 2, 70, 68, 68); view.defPosition("b7", 70, 70, 68, 68); view.defPosition("c7", 138, 70, 68, 68); view.defPosition("d7", 206, 70, 68, 68); view.defPosition("e7", 274, 70, 68, 68); view.defPosition("f7", 342, 70, 68, 68); view.defPosition("g7", 410, 70, 68, 68); view.defPosition("h7", 478, 70, 68, 68); view.defPosition("a6", 2, 138, 68, 68); view.defPosition("b6", 70, 138, 68, 68); view.defPosition("c6", 138, 138, 68, 68); view.defPosition("d6", 206, 138, 68, 68); view.defPosition("e6", 274, 138, 68, 68); view.defPosition("f6", 342, 138, 68, 68); view.defPosition("g6", 410, 138, 68, 68); view.defPosition("h6", 478, 138, 68, 68); view.defPosition("a5", 2, 206, 68, 68); view.defPosition("b5", 70, 206, 68, 68); view.defPosition("c5", 138, 206, 68, 68); view.defPosition("d5", 206, 206, 68, 68); view.defPosition("e5", 274, 206, 68, 68); view.defPosition("f5", 342, 206, 68, 68); view.defPosition("g5", 410, 206, 68, 68); view.defPosition("h5", 478, 206, 68, 68); view.defPosition("a4", 2, 274, 68, 68); view.defPosition("b4", 70, 274, 68, 68); view.defPosition("c4", 138, 274, 68, 68); view.defPosition("d4", 206, 274, 68, 68); view.defPosition("e4", 274, 274, 68, 68); view.defPosition("f4", 342, 274, 68, 68); view.defPosition("g4", 410, 274, 68, 68); view.defPosition("h4", 478, 274, 68, 68); view.defPosition("a3", 2, 342, 68, 68); view.defPosition("b3", 70, 342, 68, 68); view.defPosition("c3", 138, 342, 68, 68); view.defPosition("d3", 206, 342, 68, 68); view.defPosition("e3", 274, 342, 68, 68); view.defPosition("f3", 342, 342, 68, 68); view.defPosition("g3", 410, 342, 68, 68); view.defPosition("h3", 478, 342, 68, 68); view.defPosition("a2", 2, 410, 68, 68); view.defPosition("b2", 70, 410, 68, 68); view.defPosition("c2", 138, 410, 68, 68); view.defPosition("d2", 206, 410, 68, 68); view.defPosition("e2", 274, 410, 68, 68); view.defPosition("f2", 342, 410, 68, 68); view.defPosition("g2", 410, 410, 68, 68); view.defPosition("h2", 478, 410, 68, 68); view.defPosition("a1", 2, 478, 68, 68); view.defPosition("b1", 70, 478, 68, 68); view.defPosition("c1", 138, 478, 68, 68); view.defPosition("d1", 206, 478, 68, 68); view.defPosition("e1", 274, 478, 68, 68); view.defPosition("f1", 342, 478, 68, 68); view.defPosition("g1", 410, 478, 68, 68); view.defPosition("h1", 478, 478, 68, 68); view.defPopup("Promote", 127, 100); view.defPopupPosition("X1", 10, 7, 68, 68); view.defPopupPosition("X2", 80, 7, 68, 68); view.defPopupPosition("X3", 150, 7, 68, 68); view.defPopupPosition("X4", 220, 7, 68, 68); <|fim▁hole|><|fim▁end|>
}
<|file_name|>0015_add_tp_path_idx.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.9.11 on 2016-11-04 16:36 from __future__ import unicode_literals from django.db import migrations <|fim▁hole|> ('pootle_app', '0014_set_directory_tp_path'), ] operations = [ migrations.AlterIndexTogether( name='directory', index_together=set([('obsolete', 'tp', 'tp_path'), ('obsolete', 'pootle_path')]), ), ]<|fim▁end|>
class Migration(migrations.Migration): dependencies = [
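Because the row above is stored in fill-in-the-middle form, it can help to see the prompt and completion spliced back together. The reconstruction below only rearranges text already present in the row; the line breaks and indentation follow conventional Django migration layout, which is an assumption since the dump flattens whitespace.

# -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2016-11-04 16:36
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pootle_app', '0014_set_directory_tp_path'),
    ]

    operations = [
        migrations.AlterIndexTogether(
            name='directory',
            index_together=set([('obsolete', 'tp', 'tp_path'),
                                ('obsolete', 'pootle_path')]),
        ),
    ]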
<|file_name|>api.js<|end_file_name|><|fim▁begin|>YUI.add("yuidoc-meta", function(Y) { Y.YUIDoc = { meta: { "classes": [ "Amplitude", "AudioIn", "Env", "FFT", "Noise", "Oscillator", "Pulse", "SoundFile",<|fim▁hole|> "p5.dom", "p5.sound" ], "modules": [ "p5.dom", "p5.sound" ], "allModules": [ { "displayName": "p5.dom", "name": "p5.dom", "description": "This is the p5.dom library." }, { "displayName": "p5.sound", "name": "p5.sound", "description": "p5.sound extends p5 with Web Audio functionality including audio input, playback, analysis and synthesis." } ] } }; });<|fim▁end|>
"p5.Element", "p5.MediaElement",
<|file_name|>Zinser_Assignment8.py<|end_file_name|><|fim▁begin|>#Mitch Zinser #CSCI 3202 Assignment 8 #Worked with the Wikipedia Example and Brady Auen from math import log2 #For converting numbers to log base 2 '''PIPE TO EXTERNAL FILE WITH > filename.txt''' letters = 'abcdefghijklmnopqrstuvwxyz' '''File to read in data from, change this name to read from other files''' file_name = "typos20.data" test_file = "typos20Test.data" ''' NOTE: Spaces are uncorrupted. Words always have the same number of letters and transition to spaces at the end of the word ''' #Converts input file in format of columns 1 = correct word, columns 2 = space, column 3 = wrong word. One letter column, words separated by "_ _" #Retuns two lists, first list is words in first column, second list is words in second column def data_parser(name): #Store columns first_col = [] second_col = [] #Temporarily store words as they are built word1 = "" word2 = "" #Emission dict #Dictionary that stores the intended letter as key, and observed letters with frequencies as value emis_freq = {} #Fill dictionary with dictionaries, and those with letter entries (init to 0) for i in letters: emis_freq[i] = {} for j in letters: emis_freq[i][j] = 0 #Transition dictionary #Dictionary that stores the first letter (t) as the key, and second letter (t+1) as the second key with frequencies as value tran_freq = {} #Fill dictionary with dictionaries, and those with letter entries (init to 0) for i in (letters+"_"): tran_freq[i] = {} for j in (letters+"_"): tran_freq[i][j] = 0 #Initial dictionary #Dictionary to store frequency that a letter occurs in the first col (hidden, actual) init_freq = {} #Fill dictionary with letter entries (init to 0) for i in (letters+"_"): init_freq[i] = 0 #Open the file with open(name,"r") as data_in: #Store the last char last_char = "" #Bool to see if this is the rist char first_char = True #Iterate through the file line by line for i in data_in.readlines(): #Initial #Increment the first col characters frequency in the intial dict init_freq[i[0]] += 1 #Transition #Make sure this isn't the first if first_char: first_char = False #Otherwise add to the transition frequency dict else: tran_freq[last_char][i[0]] += 1 #Set the last char to be the current first col char that we have added to the dict last_char = i[0] #Check if this line is a separation between words ("_") if i[0] == "_": #Append word to list of words first_col.append(word1) second_col.append(word2) #Reset temperary word storage word1 = "" word2 = "" #Otherwise line is letter else: #Append letters to their temporary storage containers word1 += i[0]<|fim▁hole|> if i[2] in emis_freq[i[0]]: emis_freq[i[0]][i[2]] += 1 else: emis_freq[i[0]][i[2]] = 1 #Cleanup since data file doesn't end in a "_ _" line first_col.append(word1) second_col.append(word2) '''Emission Calulations''' #Add entry to dict 'tot' that holds the total number of times the letter appears #Iterate through keys (actual letters) for i in emis_freq: #Reset total tot = 0 #Iterate through evidence keys for letter i for j in emis_freq[i]: tot += emis_freq[i][j] #Add 'tot' entry to dict emis_freq[i]["tot"] = tot #Now take this data (total) and create a probability dictionary emis_prob = {} #Iterate through keys (actual letters) for i in emis_freq: #Create dictionary for this actual letter in new dict emis_prob[i] = {} #Iterate through evidence keys for letter i for j in emis_freq[i]: #Add one to the numerator and 26 (num of letters) to the denominator emis_prob[i][j] = 
(emis_freq[i][j]+1)/(emis_freq[i]["tot"]+26) #Add the very small, basically 0 chance of a "_" getting in the mix (chance is 0 in reality) emis_prob[i]["_"] = 1/(emis_freq[i]["tot"]+26) #Remove 'tot' key from probability dict del emis_prob[i]["tot"] '''Spaces are immutable, uncorruptable beasts, and have an emission probability of 1. They are not counted''' emis_prob['_'] = {} emis_prob['_']['_'] = 0.9999999999999999 for i in letters: emis_prob['_'][i] = 0.0000000000000001 '''Transition Calulations''' #Add entry to dict 'tot' that holds the total number of times the letter appears #Iterate through keys (actual letters) for i in tran_freq: #Reset total tot = 0 #Iterate through evidence keys for letter i for j in tran_freq[i]: tot += tran_freq[i][j] #Add 'tot' entry to dict tran_freq[i]["tot"] = tot #Now take this data (total) and create a probability dictionary tran_prob = {} #Iterate through keys (actual letters) for i in tran_freq: #Create dictionary for this actual letter in new dict tran_prob[i] = {} #Iterate through evidence keys for letter i for j in tran_freq[i]: #Add one to the numerator and 27 (num of letters + '_') to the denominator tran_prob[i][j] = (tran_freq[i][j]+1)/(tran_freq[i]["tot"]+27) #Remove 'tot' key from probability dict del tran_prob[i]["tot"] '''Initial Calculations''' #Count the total number of characters in the first col (hidden) tot = 0 for i in init_freq: tot += init_freq[i] #Dict that stores the probabilities of each letter init_prob = {} for i in init_freq: init_prob[i] = (init_freq[i]/tot)#(init_freq[i]/len("_".join(first_col))) #Return both lists as and probability dtionary return first_col,second_col,emis_prob,tran_prob,init_prob #Viterbi algorithm, returns final prob of getting to end and likely route (sequence of letters) #Takes in: Evid (observed state sequence, one giant string with underscores for spaces), hidd (list of hidden states, eg. list of possible letters), star (dict of starting probabilities), tran (transition probability dict), emis (emission probability dict) #Tran must be in format tran[prev][cur] #Emis must be in format emis[hidden][observed] def furby(evid, hidd, star, tran, emis): '''Spaces have a 1.0 emission prob, since they are uncorrupted''' '''Use math libraries log2 to convert to log base 2 for math. Convert back with math libraries pow(2, num) if desired''' '''Log2 can still use max. 
log2(0.8) > log2(0.2)''' #Create list that uses the time as the index and the value is a dict to store probability P = [{}] #Create a dict for the path path = {} #Create dict for t(0) (seed dict with inital entries) #Iterate through start dict (Contains all states that sequence can start with) for i in star: #Calculate probability with start[letter]*emission (add instead of multiply with log numbers) P[0][i] = log2(star[i])+log2(emis[i][evid[0]]) path[i] = [i] #Run for t > 1, start at second letter for i in range(1,len(evid)): #Create new dict at end of list of dicts (dict for each time value) P.append({}) #Dict to temporarily store path for this iteration temp_path = {} #Iterate through all possible states that are connected to the previous state chosen for j in hidd: #Use list comprehension to iterate through states, calculate trans*emis*P[t-1] for each possible state, find max and store that in path (prob, state) = max((P[i-1][k] + log2(tran[k][j]) + log2(emis[j][evid[i]]), k) for k in hidd) P[i][j] = prob temp_path[j] = path[state] + [j] # Don't need to remember the old paths path = temp_path #Find max prob in the last iteration of the list of dicts (P) n = len(evid)-1 (prob, state) = max((P[n][y], y) for y in hidd) #Return the probability for the best last state and the path for it as a list of 1 char strings return prob,path[state] #Function that takes in 2 strings of equal length and returns the error percent. String 1 is the correct string, string 2 is checked for errors def error_rate(correct, check): errors = 0 for i in range(0,len(correct)): if correct[i] != check[i]: errors += 1 return errors/len(correct) if __name__ == "__main__": #Set correct and actual as lists to hold words in each column correct,actual,conditional,transitional,initial = data_parser(file_name) #Get the data from another file to run the algorithm on. 
Get the 1st and 3rd column as strings #String that had the hidden state sequence (1st column) test_hidden = "" #String that stores the observed column (3rd column) test_observed = "" #Open file to get data from with open(test_file,"r") as test_in: #Iterate through lines of file for i in test_in.readlines(): #Store first column letter test_hidden += i[0] #Store third column letter test_observed += i[2] #Run Viterbi prob, path = furby(test_observed, letters+"_", initial, transitional, conditional) #Calculate error rates print("Error rate of", test_file, "before Viterbi:",error_rate(test_hidden,test_observed)*100,"%") print("Error rate of", test_file, "after Viterbi:",error_rate(test_hidden,path)*100,"%") print("--------State Sequence--------") #Print final sequence in more readable format by joining list print("".join(path)) #Print the probability of the final state for fun print("--------Final State Probability--------") print("In Log2:", prob) print("In Decimal:", pow(2,prob)) ''' Part 1 #Print conditional print("----------------Condition----------------") #Iterate through keys of a sorted dictionary for i in sorted(conditional): print("--------Hidden:",i,"--------") #Iterate through keys of dict in dict (value dict to the key "i") for j in sorted(conditional[i]): #Print the number of occurances print(j, conditional[i][j]) #Print transitional print("----------------Transition----------------") #Iterate through keys of a sorted dictionary for i in sorted(transitional): print("--------Previous:",i,"--------") #Iterate through keys of dict in dict (value dict to the key "i") for j in sorted(transitional[i]): #Print the number of occurances print(j, transitional[i][j]) #Print Initial print("----------------Initial (Using Hidden)----------------") #Iterate through key of sorted dict for i in sorted(initial): print(i, initial[i]) '''<|fim▁end|>
word2 += i[2]
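The Zinser assignment row above decodes corrupted text with a hidden Markov model: Laplace-smoothed emission and transition tables, then a Viterbi pass run in log base 2 so products become sums. As a quick illustration of that same log-space recurrence, here is a minimal standalone sketch on a made-up two-state HMM (toy probabilities and observations, not the assignment's typo data):

```python
from math import log2

# Toy HMM (invented values): two hidden states, two possible observations.
states = ["a", "b"]
start = {"a": 0.6, "b": 0.4}
trans = {"a": {"a": 0.7, "b": 0.3}, "b": {"a": 0.4, "b": 0.6}}
emit = {"a": {"x": 0.9, "y": 0.1}, "b": {"x": 0.2, "y": 0.8}}
obs = "xyy"

# Seed with the initial probabilities (log base 2, so multiplications become additions).
V = [{s: log2(start[s]) + log2(emit[s][obs[0]]) for s in states}]
path = {s: [s] for s in states}

for t in range(1, len(obs)):
    V.append({})
    new_path = {}
    for s in states:
        # Best predecessor for state s at time t.
        prob, prev = max(
            (V[t - 1][p] + log2(trans[p][s]) + log2(emit[s][obs[t]]), p) for p in states
        )
        V[t][s] = prob
        new_path[s] = path[prev] + [s]
    path = new_path

best_prob, best_state = max((V[-1][s], s) for s in states)
print(best_prob, "".join(path[best_state]))
```

The `max` over `(log-probability, previous-state)` tuples mirrors the generator expression used inside the assignment's `furby` function.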
<|file_name|>todoviewview.cpp<|end_file_name|><|fim▁begin|>/* This file is part of KOrganizer. Copyright (c) 2008 Thomas Thrainer <[email protected]> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. As a special exception, permission is given to link this program with any edition of Qt, and distribute the resulting executable, without including the source code for Qt in the source distribution. */ #include "todoviewview.h" #include <KLocalizedString> #include <KMenu> #include <QAction> #include <QContextMenuEvent> #include <QEvent> #include <QHeaderView> #include <QMouseEvent> TodoViewView::TodoViewView( QWidget *parent ) : QTreeView( parent ), mHeaderPopup( 0 ), mIgnoreNextMouseRelease( false ) { header()->installEventFilter( this ); setAlternatingRowColors( true ); connect( &mExpandTimer, SIGNAL(timeout()), SLOT(expandParent()) ); mExpandTimer.setInterval( 1000 ); header()->setStretchLastSection( false ); } bool TodoViewView::isEditing( const QModelIndex &index ) const { return state() & QAbstractItemView::EditingState && currentIndex() == index; } bool TodoViewView::eventFilter( QObject *watched, QEvent *event ) { Q_UNUSED( watched ); if ( event->type() == QEvent::ContextMenu ) { QContextMenuEvent *e = static_cast<QContextMenuEvent *>( event ); if ( !mHeaderPopup ) { mHeaderPopup = new KMenu( this ); mHeaderPopup->addTitle( i18n( "View Columns" ) ); // First entry can't be disabled for ( int i = 1; i < model()->columnCount(); ++i ) { QAction *tmp = mHeaderPopup->addAction( model()->headerData( i, Qt::Horizontal ).toString() ); tmp->setData( QVariant( i ) ); tmp->setCheckable( true ); mColumnActions << tmp; } connect( mHeaderPopup, SIGNAL(triggered(QAction*)), this, SLOT(toggleColumnHidden(QAction*)) ); } foreach ( QAction *action, mColumnActions ) { int column = action->data().toInt(); action->setChecked( !isColumnHidden( column ) ); } mHeaderPopup->popup( mapToGlobal( e->pos() ) ); return true; } return false; } void TodoViewView::toggleColumnHidden( QAction *action ) { if ( action->isChecked() ) { showColumn( action->data().toInt() ); } else { hideColumn( action->data().toInt() ); } emit visibleColumnCountChanged(); } QModelIndex TodoViewView::moveCursor( CursorAction cursorAction, Qt::KeyboardModifiers modifiers ) { QModelIndex current = currentIndex(); if ( !current.isValid() ) { return QTreeView::moveCursor( cursorAction, modifiers ); } switch ( cursorAction ) { case MoveNext: { // try to find an editable item right of the current one QModelIndex tmp = getNextEditableIndex(<|fim▁hole|> return tmp; } // check if the current item is expanded, and find an editable item // just below it if so current = current.sibling( current.row(), 0 ); if ( isExpanded( current ) ) { tmp = getNextEditableIndex( current.child( 0, 0 ), 1 ); if ( tmp.isValid() ) { return tmp; } } // find an editable item in the item below the currently edited one tmp = getNextEditableIndex( 
current.sibling( current.row() + 1, 0 ), 1 ); if ( tmp.isValid() ) { return tmp; } // step back a hierarchy level, and search for an editable item there while ( current.isValid() ) { current = current.parent(); tmp = getNextEditableIndex( current.sibling( current.row() + 1, 0 ), 1 ); if ( tmp.isValid() ) { return tmp; } } return QModelIndex(); } case MovePrevious: { // try to find an editable item left of the current one QModelIndex tmp = getNextEditableIndex( current.sibling( current.row(), current.column() - 1 ), -1 ); if ( tmp.isValid() ) { return tmp; } int lastCol = model()->columnCount( QModelIndex() ) - 1; // search on top of the item, also include expanded items tmp = current.sibling( current.row() - 1, 0 ); while ( tmp.isValid() && isExpanded( tmp ) ) { tmp = tmp.child( model()->rowCount( tmp ) - 1, 0 ); } if ( tmp.isValid() ) { tmp = getNextEditableIndex( tmp.sibling( tmp.row(), lastCol ), -1 ); if ( tmp.isValid() ) { return tmp; } } // step back a hierarchy level, and search for an editable item there current = current.parent(); return getNextEditableIndex( current.sibling( current.row(), lastCol ), -1 ); } default: break; } return QTreeView::moveCursor( cursorAction, modifiers ); } QModelIndex TodoViewView::getNextEditableIndex( const QModelIndex &cur, int inc ) { if ( !cur.isValid() ) { return QModelIndex(); } QModelIndex tmp; int colCount = model()->columnCount( QModelIndex() ); int end = inc == 1 ? colCount : -1; for ( int c = cur.column(); c != end; c += inc ) { tmp = cur.sibling( cur.row(), c ); if ( ( tmp.flags() & Qt::ItemIsEditable ) && !isIndexHidden( tmp ) ) { return tmp; } } return QModelIndex(); } void TodoViewView::mouseReleaseEvent ( QMouseEvent *event ) { mExpandTimer.stop(); if ( mIgnoreNextMouseRelease ) { mIgnoreNextMouseRelease = false; return; } if ( !indexAt( event->pos() ).isValid() ) { clearSelection(); event->accept(); } else { QTreeView::mouseReleaseEvent( event ); } } void TodoViewView::mouseMoveEvent( QMouseEvent *event ) { mExpandTimer.stop(); QTreeView::mouseMoveEvent( event ); } void TodoViewView::mousePressEvent( QMouseEvent *event ) { mExpandTimer.stop(); QModelIndex index = indexAt( event->pos() ); if ( index.isValid() && event->button() == Qt::LeftButton ) { mExpandTimer.start(); } QTreeView::mousePressEvent( event ); } void TodoViewView::expandParent() { QModelIndex index = indexAt( viewport()->mapFromGlobal( QCursor::pos() ) ); if ( index.isValid() ) { mIgnoreNextMouseRelease = true; QKeyEvent keyEvent = QKeyEvent( QEvent::KeyPress, Qt::Key_Asterisk, Qt::NoModifier ); QTreeView::keyPressEvent( &keyEvent ); } }<|fim▁end|>
current.sibling( current.row(), current.column() + 1 ), 1 ); if ( tmp.isValid() ) {
<|file_name|>storage_folding.cpp<|end_file_name|><|fim▁begin|>#include <stdio.h> #include "Halide.h" using namespace Halide; // Override Halide's malloc and free size_t custom_malloc_size = 0; void *my_malloc(void *user_context, size_t x) { custom_malloc_size = x; void *orig = malloc(x+32); void *ptr = (void *)((((size_t)orig + 32) >> 5) << 5); ((void **)ptr)[-1] = orig; return ptr; } void my_free(void *user_context, void *ptr) { free(((void**)ptr)[-1]); } int main(int argc, char **argv) { Var x, y; { Func f, g; f(x, y) = x; g(x, y) = f(x-1, y) + f(x, y-1); f.store_root().compute_at(g, x); g.set_custom_allocator(my_malloc, my_free); Image<int> im = g.realize(1000, 1000); // Should fold by a factor of two, but sliding window analysis makes it round up to 4. if (custom_malloc_size == 0 || custom_malloc_size > 1002*4*sizeof(int)) { printf("Scratch space allocated was %d instead of %d\n", (int)custom_malloc_size, (int)(1002*4*sizeof(int))); return -1; } }<|fim▁hole|> { custom_malloc_size = 0; Func f, g; g(x, y) = x * y; f(x, y) = g(2*x, 2*y) + g(2*x+1, 2*y+1); // Each instance of f uses a non-overlapping 2x2 box of // g. Should be able to fold storage of g down to a stack // allocation. g.compute_at(f, x).store_root(); f.set_custom_allocator(my_malloc, my_free); Image<int> im = f.realize(1000, 1000); if (custom_malloc_size != 0) { printf("There should not have been a heap allocation\n"); return -1; } for (int y = 0; y < im.height(); y++) { for (int x = 0; x < im.width(); x++) { int correct = (2*x) * (2*y) + (2*x+1) * (2*y+1); if (im(x, y) != correct) { printf("im(%d, %d) = %d instead of %d\n", x, y, im(x, y), correct); return -1; } } } } { custom_malloc_size = 0; Func f, g; g(x, y) = x * y; f(x, y) = g(x, 2*y) + g(x+3, 2*y+1); // Each instance of f uses a non-overlapping 2-scanline slice // of g in y, and is a stencil over x. Should be able to fold // both x and y. g.compute_at(f, x).store_root(); f.set_custom_allocator(my_malloc, my_free); Image<int> im = f.realize(1000, 1000); if (custom_malloc_size != 0) { printf("There should not have been a heap allocation\n"); return -1; } for (int y = 0; y < im.height(); y++) { for (int x = 0; x < im.width(); x++) { int correct = x * (2*y) + (x+3) * (2*y+1); if (im(x, y) != correct) { printf("im(%d, %d) = %d instead of %d\n", x, y, im(x, y), correct); return -1; } } } } { custom_malloc_size = 0; Func f, g; g(x, y) = x * y; f(x, y) = g(2*x, y) + g(2*x+1, y+3); // Each instance of f uses a non-overlapping 2-scanline slice // of g in x, and is a stencil over y. We can't fold in x due // to the stencil in y. We need to keep around entire // scanlines. 
g.compute_at(f, x).store_root(); f.set_custom_allocator(my_malloc, my_free); Image<int> im = f.realize(1000, 1000); if (custom_malloc_size == 0 || custom_malloc_size > 2*1002*4*sizeof(int)) { printf("Scratch space allocated was %d instead of %d\n", (int)custom_malloc_size, (int)(1002*4*sizeof(int))); return -1; } for (int y = 0; y < im.height(); y++) { for (int x = 0; x < im.width(); x++) { int correct = (2*x) * y + (2*x+1) * (y+3); if (im(x, y) != correct) { printf("im(%d, %d) = %d instead of %d\n", x, y, im(x, y), correct); return -1; } } } } { custom_malloc_size = 0; Func f, g; g(x, y) = x * y; f(x, y) = g(x, y); Var yo, yi; f.bound(y, 0, (f.output_buffer().height()/8)*8).split(y, yo, yi, 8); g.compute_at(f, yo).store_root(); // The split logic shouldn't interfere with the ability to // fold f down to an 8-scanline allocation, but it's only // correct to fold if we know the output height is a multiple // of the split factor. f.set_custom_allocator(my_malloc, my_free); Image<int> im = f.realize(1000, 1000); if (custom_malloc_size == 0 || custom_malloc_size > 1000*8*sizeof(int)) { printf("Scratch space allocated was %d instead of %d\n", (int)custom_malloc_size, (int)(1000*8*sizeof(int))); return -1; } for (int y = 0; y < im.height(); y++) { for (int x = 0; x < im.width(); x++) { int correct = x*y; if (im(x, y) != correct) { printf("im(%d, %d) = %d instead of %d\n", x, y, im(x, y), correct); return -1; } } } } printf("Success!\n"); return 0; }<|fim▁end|>
<|file_name|>decoder.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Base classes and functions for dynamic decoding.""" import abc import tensorflow as tf from tensorflow_addons.utils.types import TensorLike from typeguard import typechecked from typing import Any, Optional, Tuple, Union # TODO: Find public API alternatives to these from tensorflow.python.ops import control_flow_util class Decoder(metaclass=abc.ABCMeta): """An RNN Decoder abstract interface object. Concepts used by this interface: - `inputs`: (structure of) tensors and TensorArrays that is passed as input to the RNN cell composing the decoder, at each time step. - `state`: (structure of) tensors and TensorArrays that is passed to the RNN cell instance as the state. - `finished`: boolean tensor telling whether each sequence in the batch is finished. - `training`: boolean whether it should behave in training mode or in inference mode. - `outputs`: instance of `tfa.seq2seq.BasicDecoderOutput`. Result of the decoding, at each time step. """ @property def batch_size(self): """The batch size of input values.""" raise NotImplementedError @property def output_size(self): """A (possibly nested tuple of...) integer[s] or `TensorShape` object[s].""" raise NotImplementedError @property def output_dtype(self): """A (possibly nested tuple of...) dtype[s].""" raise NotImplementedError @abc.abstractmethod def initialize(self, name=None): """Called before any decoding iterations. This methods must compute initial input values and initial state. Args: name: Name scope for any created operations. Returns: `(finished, initial_inputs, initial_state)`: initial values of 'finished' flags, inputs and state. """ raise NotImplementedError @abc.abstractmethod def step(self, time, inputs, state, training=None, name=None): """Called per step of decoding (but only once for dynamic decoding). Args: time: Scalar `int32` tensor. Current step number. inputs: RNN cell input (possibly nested tuple of) tensor[s] for this time step. state: RNN cell state (possibly nested tuple of) tensor[s] from previous time step. training: Python boolean. Indicates whether the layer should behave in training mode or in inference mode. Only relevant when `dropout` or `recurrent_dropout` is used. name: Name scope for any created operations. Returns: `(outputs, next_state, next_inputs, finished)`: `outputs` is an object containing the decoder output, `next_state` is a (structure of) state tensors and TensorArrays, `next_inputs` is the tensor that should be used as input for the next step, `finished` is a boolean tensor telling whether the sequence is complete, for each sequence in the batch. """ raise NotImplementedError def finalize(self, outputs, final_state, sequence_lengths): raise NotImplementedError @property def tracks_own_finished(self): """Describes whether the Decoder keeps track of finished states. 
Most decoders will emit a true/false `finished` value independently at each time step. In this case, the `tfa.seq2seq.dynamic_decode` function keeps track of which batch entries are already finished, and performs a logical OR to insert new batches to the finished set. <|fim▁hole|> own finished state by setting this property to `True`. Returns: Python bool. """ return False class BaseDecoder(tf.keras.layers.Layer): """An RNN Decoder that is based on a Keras layer. Concepts used by this interface: - `inputs`: (structure of) Tensors and TensorArrays that is passed as input to the RNN cell composing the decoder, at each time step. - `state`: (structure of) Tensors and TensorArrays that is passed to the RNN cell instance as the state. - `memory`: tensor that is usually the full output of the encoder, which will be used for the attention wrapper for the RNN cell. - `finished`: boolean tensor telling whether each sequence in the batch is finished. - `training`: boolean whether it should behave in training mode or in inference mode. - `outputs`: instance of `tfa.seq2seq.BasicDecoderOutput`. Result of the decoding, at each time step. """ @typechecked def __init__( self, output_time_major: bool = False, impute_finished: bool = False, maximum_iterations: Optional[TensorLike] = None, parallel_iterations: int = 32, swap_memory: bool = False, **kwargs, ): self.output_time_major = output_time_major self.impute_finished = impute_finished self.maximum_iterations = maximum_iterations self.parallel_iterations = parallel_iterations self.swap_memory = swap_memory super().__init__(**kwargs) def call(self, inputs, initial_state=None, training=None, **kwargs): init_kwargs = kwargs init_kwargs["initial_state"] = initial_state return dynamic_decode( self, output_time_major=self.output_time_major, impute_finished=self.impute_finished, maximum_iterations=self.maximum_iterations, parallel_iterations=self.parallel_iterations, swap_memory=self.swap_memory, training=training, decoder_init_input=inputs, decoder_init_kwargs=init_kwargs, ) @property def batch_size(self): """The batch size of input values.""" raise NotImplementedError @property def output_size(self): """A (possibly nested tuple of...) integer[s] or `TensorShape` object[s].""" raise NotImplementedError @property def output_dtype(self): """A (possibly nested tuple of...) dtype[s].""" raise NotImplementedError def initialize(self, inputs, initial_state=None, **kwargs): """Called before any decoding iterations. This methods must compute initial input values and initial state. Args: inputs: (structure of) tensors that contains the input for the decoder. In the normal case, it's a tensor with shape [batch, timestep, embedding]. initial_state: (structure of) tensors that contains the initial state for the RNN cell. **kwargs: Other arguments that are passed in from layer.call() method. It could contains item like input `sequence_length`, or masking for input. Returns: `(finished, initial_inputs, initial_state)`: initial values of 'finished' flags, inputs and state. """ raise NotImplementedError def step(self, time, inputs, state, training): """Called per step of decoding (but only once for dynamic decoding). Args: time: Scalar `int32` tensor. Current step number. inputs: RNN cell input (possibly nested tuple of) tensor[s] for this time step. state: RNN cell state (possibly nested tuple of) tensor[s] from previous time step. training: Python boolean. Indicates whether the layer should behave in training mode or in inference mode. 
Returns: `(outputs, next_state, next_inputs, finished)`: `outputs` is an object containing the decoder output, `next_state` is a (structure of) state tensors and TensorArrays, `next_inputs` is the tensor that should be used as input for the next step, `finished` is a boolean tensor telling whether the sequence is complete, for each sequence in the batch. """ raise NotImplementedError def finalize(self, outputs, final_state, sequence_lengths): raise NotImplementedError @property def tracks_own_finished(self): """Describes whether the Decoder keeps track of finished states. Most decoders will emit a true/false `finished` value independently at each time step. In this case, the `tfa.seq2seq.dynamic_decode` function keeps track of which batch entries are already finished, and performs a logical OR to insert new batches to the finished set. Some decoders, however, shuffle batches / beams between time steps and `tfa.seq2seq.dynamic_decode` will mix up the finished state across these entries because it does not track the reshuffle across time steps. In this case, it is up to the decoder to declare that it will keep track of its own finished state by setting this property to `True`. Returns: Python bool. """ return False # TODO(scottzhu): Add build/get_config/from_config and other layer methods. @typechecked def dynamic_decode( decoder: Union[Decoder, BaseDecoder], output_time_major: bool = False, impute_finished: bool = False, maximum_iterations: Optional[TensorLike] = None, parallel_iterations: int = 32, swap_memory: bool = False, training: Optional[bool] = None, scope: Optional[str] = None, enable_tflite_convertible: bool = False, **kwargs, ) -> Tuple[Any, Any, Any]: """Runs dynamic decoding with a decoder. Calls `initialize()` once and `step()` repeatedly on the decoder object. Args: decoder: A `tfa.seq2seq.Decoder` or `tfa.seq2seq.BaseDecoder` instance. output_time_major: Python boolean. Default: `False` (batch major). If `True`, outputs are returned as time major tensors (this mode is faster). Otherwise, outputs are returned as batch major tensors (this adds extra time to the computation). impute_finished: Python boolean. If `True`, then states for batch entries which are marked as finished get copied through and the corresponding outputs get zeroed out. This causes some slowdown at each time step, but ensures that the final state and outputs have the correct values and that backprop ignores time steps that were marked as finished. maximum_iterations: A strictly positive `int32` scalar, the maximum allowed number of decoding steps. Default is `None` (decode until the decoder is fully done). parallel_iterations: Argument passed to `tf.while_loop`. swap_memory: Argument passed to `tf.while_loop`. training: Python boolean. Indicates whether the layer should behave in training mode or in inference mode. Only relevant when `dropout` or `recurrent_dropout` is used. scope: Optional name scope to use. enable_tflite_convertible: Python boolean. If `True`, then the variables of `TensorArray` become of 1-D static shape. Also zero pads in the output tensor will be discarded. Default: `False`. **kwargs: dict, other keyword arguments for dynamic_decode. It might contain arguments for `BaseDecoder` to initialize, which takes all tensor inputs during call(). Returns: `(final_outputs, final_state, final_sequence_lengths)`. Raises: ValueError: if `maximum_iterations` is provided but is not a scalar. 
""" with tf.name_scope(scope or "decoder"): is_xla = ( not tf.executing_eagerly() and control_flow_util.GraphOrParentsInXlaContext( tf.compat.v1.get_default_graph() ) ) if maximum_iterations is not None: maximum_iterations = tf.convert_to_tensor( maximum_iterations, dtype=tf.int32, name="maximum_iterations" ) if maximum_iterations.shape.ndims != 0: raise ValueError("maximum_iterations must be a scalar") tf.debugging.assert_greater( maximum_iterations, 0, message="maximum_iterations should be greater than 0", ) elif is_xla: raise ValueError("maximum_iterations is required for XLA compilation.") if isinstance(decoder, Decoder): initial_finished, initial_inputs, initial_state = decoder.initialize() else: # For BaseDecoder that takes tensor inputs during call. decoder_init_input = kwargs.pop("decoder_init_input", None) decoder_init_kwargs = kwargs.pop("decoder_init_kwargs", {}) initial_finished, initial_inputs, initial_state = decoder.initialize( decoder_init_input, **decoder_init_kwargs ) if enable_tflite_convertible: # Assume the batch_size = 1 for inference. # So we can change 2-D TensorArray into 1-D by reshaping it. tf.debugging.assert_equal( decoder.batch_size, 1, message="TFLite conversion requires a batch size of 1", ) zero_outputs = tf.nest.map_structure( lambda shape, dtype: tf.reshape( tf.zeros(_prepend_batch(decoder.batch_size, shape), dtype=dtype), [-1], ), decoder.output_size, decoder.output_dtype, ) else: zero_outputs = tf.nest.map_structure( lambda shape, dtype: tf.zeros( _prepend_batch(decoder.batch_size, shape), dtype=dtype ), decoder.output_size, decoder.output_dtype, ) if maximum_iterations is not None: initial_finished = tf.logical_or(initial_finished, 0 >= maximum_iterations) initial_sequence_lengths = tf.zeros_like(initial_finished, dtype=tf.int32) initial_time = tf.constant(0, dtype=tf.int32) def _shape(batch_size, from_shape): if not isinstance(from_shape, tf.TensorShape) or from_shape.ndims == 0: return None else: batch_size = tf.get_static_value( tf.convert_to_tensor(batch_size, name="batch_size") ) return tf.TensorShape([batch_size]).concatenate(from_shape) dynamic_size = maximum_iterations is None or not is_xla # The dynamic shape `TensorArray` is not allowed in TFLite yet. dynamic_size = dynamic_size and (not enable_tflite_convertible) def _create_ta(s, d): if enable_tflite_convertible: # TFLite requires 1D element_shape. if isinstance(s, tf.TensorShape) and s.ndims == 0: s = (1,) element_shape = s else: element_shape = _shape(decoder.batch_size, s) return tf.TensorArray( dtype=d, size=0 if dynamic_size else maximum_iterations, dynamic_size=dynamic_size, element_shape=element_shape, ) initial_outputs_ta = tf.nest.map_structure( _create_ta, decoder.output_size, decoder.output_dtype ) def condition( unused_time, unused_outputs_ta, unused_state, unused_inputs, finished, unused_sequence_lengths, ): return tf.logical_not(tf.reduce_all(finished)) def body(time, outputs_ta, state, inputs, finished, sequence_lengths): """Internal while_loop body. Args: time: scalar int32 tensor. outputs_ta: structure of TensorArray. state: (structure of) state tensors and TensorArrays. inputs: (structure of) input tensors. finished: bool tensor (keeping track of what's finished). sequence_lengths: int32 tensor (keeping track of time of finish). Returns: `(time + 1, outputs_ta, next_state, next_inputs, next_finished, next_sequence_lengths)`. 
``` """ (next_outputs, decoder_state, next_inputs, decoder_finished) = decoder.step( time, inputs, state, training ) decoder_state_sequence_lengths = False if decoder.tracks_own_finished: next_finished = decoder_finished lengths = getattr(decoder_state, "lengths", None) if lengths is not None: # sequence lengths are provided by decoder_state.lengths; # overwrite our sequence lengths. decoder_state_sequence_lengths = True sequence_lengths = tf.cast(lengths, tf.int32) else: next_finished = tf.logical_or(decoder_finished, finished) if decoder_state_sequence_lengths: # Just pass something through the loop; at the next iteration # we'll pull the sequence lengths from the decoder_state again. next_sequence_lengths = sequence_lengths else: next_sequence_lengths = tf.where( tf.logical_not(finished), tf.fill(tf.shape(sequence_lengths), time + 1), sequence_lengths, ) tf.nest.assert_same_structure(state, decoder_state) tf.nest.assert_same_structure(outputs_ta, next_outputs) tf.nest.assert_same_structure(inputs, next_inputs) # Zero out output values past finish if impute_finished: def zero_out_finished(out, zero): if finished.shape.rank < zero.shape.rank: broadcast_finished = tf.broadcast_to( tf.expand_dims(finished, axis=-1), zero.shape ) return tf.where(broadcast_finished, zero, out) else: return tf.where(finished, zero, out) emit = tf.nest.map_structure( zero_out_finished, next_outputs, zero_outputs ) else: emit = next_outputs # Copy through states past finish def _maybe_copy_state(new, cur): # TensorArrays and scalar states get passed through. if isinstance(cur, tf.TensorArray): pass_through = True else: new.set_shape(cur.shape) pass_through = new.shape.ndims == 0 if not pass_through: broadcast_finished = tf.broadcast_to( tf.expand_dims(finished, axis=-1), new.shape ) return tf.where(broadcast_finished, cur, new) else: return new if impute_finished: next_state = tf.nest.map_structure( _maybe_copy_state, decoder_state, state ) else: next_state = decoder_state if enable_tflite_convertible: # Reshape to 1-D. emit = tf.nest.map_structure(lambda x: tf.reshape(x, [-1]), emit) outputs_ta = tf.nest.map_structure( lambda ta, out: ta.write(time, out), outputs_ta, emit ) return ( time + 1, outputs_ta, next_state, next_inputs, next_finished, next_sequence_lengths, ) res = tf.while_loop( condition, body, loop_vars=( initial_time, initial_outputs_ta, initial_state, initial_inputs, initial_finished, initial_sequence_lengths, ), parallel_iterations=parallel_iterations, maximum_iterations=maximum_iterations, swap_memory=swap_memory, ) final_outputs_ta = res[1] final_state = res[2] final_sequence_lengths = res[5] final_outputs = tf.nest.map_structure(lambda ta: ta.stack(), final_outputs_ta) try: final_outputs, final_state = decoder.finalize( final_outputs, final_state, final_sequence_lengths ) except NotImplementedError: pass if not output_time_major: if enable_tflite_convertible: # Reshape the output to the original shape. def _restore_batch(x): return tf.expand_dims(x, [1]) final_outputs = tf.nest.map_structure(_restore_batch, final_outputs) final_outputs = tf.nest.map_structure(_transpose_batch_time, final_outputs) return final_outputs, final_state, final_sequence_lengths def _prepend_batch(batch_size, shape): """Prepends the batch dimension to the shape. If the batch_size value is known statically, this function returns a TensorShape, otherwise a Tensor. 
""" if isinstance(batch_size, tf.Tensor): static_batch_size = tf.get_static_value(batch_size) else: static_batch_size = batch_size if static_batch_size is None: return tf.concat(([batch_size], shape), axis=0) return [static_batch_size] + shape def _transpose_batch_time(tensor): """Transposes the batch and time dimension of tensor if its rank is at least 2.""" shape = tensor.shape if shape.rank is not None and shape.rank < 2: return tensor perm = tf.concat(([1, 0], tf.range(2, tf.rank(tensor))), axis=0) return tf.transpose(tensor, perm)<|fim▁end|>
Some decoders, however, shuffle batches / beams between time steps and `tfa.seq2seq.dynamic_decode` will mix up the finished state across these entries because it does not track the reshuffle across time steps. In this case, it is up to the decoder to declare that it will keep track of its
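The `decoder.py` row above defines the `Decoder`/`BaseDecoder` interfaces and the `dynamic_decode` driver. A rough usage sketch — assuming TensorFlow Addons' `BasicDecoder` and `TrainingSampler`, which are not part of this file — shows how `dynamic_decode` consumes a `BaseDecoder` through the `decoder_init_input`/`decoder_init_kwargs` hooks that the code above pops off `**kwargs`:

```python
import tensorflow as tf
import tensorflow_addons as tfa

batch, time, units, vocab = 4, 7, 16, 50  # made-up sizes for illustration

cell = tf.keras.layers.LSTMCell(units)
sampler = tfa.seq2seq.TrainingSampler()  # feeds the ground-truth input at each step
decoder = tfa.seq2seq.BasicDecoder(
    cell, sampler, output_layer=tf.keras.layers.Dense(vocab)
)

inputs = tf.random.normal([batch, time, units])
state = cell.get_initial_state(batch_size=batch, dtype=tf.float32)

# BasicDecoder is a BaseDecoder, so dynamic_decode drives initialize()/step()
# until every sequence in the batch reports `finished`.
final_outputs, final_state, final_lengths = tfa.seq2seq.dynamic_decode(
    decoder,
    decoder_init_input=inputs,
    decoder_init_kwargs={
        "initial_state": state,
        "sequence_length": tf.fill([batch], time),
    },
)
print(final_outputs.rnn_output.shape)  # (4, 7, 50)
```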
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 by SiegeLord // // All rights reserved. Distributed under ZLib. For full terms see the file LICENSE. #![crate_name="allegro_dialog_sys"] #![crate_type = "lib"] extern crate allegro_sys; #[macro_use] extern crate allegro_util; extern crate libc; pub use allegro_dialog::*; <|fim▁hole|> use libc::*; use allegro_util::c_bool; use allegro_sys::{ALLEGRO_DISPLAY, ALLEGRO_EVENT_SOURCE}; opaque!(ALLEGRO_FILECHOOSER); opaque!(ALLEGRO_TEXTLOG); pub const ALLEGRO_FILECHOOSER_FILE_MUST_EXIST: u32 = 1; pub const ALLEGRO_FILECHOOSER_SAVE: u32 = 2; pub const ALLEGRO_FILECHOOSER_FOLDER: u32 = 4; pub const ALLEGRO_FILECHOOSER_PICTURES: u32 = 8; pub const ALLEGRO_FILECHOOSER_SHOW_HIDDEN: u32 = 16; pub const ALLEGRO_FILECHOOSER_MULTIPLE: u32 = 32; pub const ALLEGRO_MESSAGEBOX_WARN: u32 = 1; pub const ALLEGRO_MESSAGEBOX_ERROR: u32 = 2; pub const ALLEGRO_MESSAGEBOX_OK_CANCEL: u32 = 4; pub const ALLEGRO_MESSAGEBOX_YES_NO: u32 = 8; pub const ALLEGRO_MESSAGEBOX_QUESTION: u32 = 16; pub const ALLEGRO_TEXTLOG_NO_CLOSE: u32 = 1; pub const ALLEGRO_TEXTLOG_MONOSPACE: u32 = 2; pub const ALLEGRO_EVENT_NATIVE_DIALOG_CLOSE: c_uint = 600; extern "C" { pub fn al_init_native_dialog_addon() -> c_bool; pub fn al_shutdown_native_dialog_addon(); pub fn al_create_native_file_dialog(initial_path: *const c_char, title: *const c_char, patterns: *const c_char, mode: c_int) -> *mut ALLEGRO_FILECHOOSER; pub fn al_show_native_file_dialog(display: *mut ALLEGRO_DISPLAY, dialog: *mut ALLEGRO_FILECHOOSER) -> c_bool; pub fn al_get_native_file_dialog_count(dialog: *const ALLEGRO_FILECHOOSER) -> c_int; pub fn al_get_native_file_dialog_path(dialog: *const ALLEGRO_FILECHOOSER, index: size_t) -> *const c_char; pub fn al_destroy_native_file_dialog(dialog: *mut ALLEGRO_FILECHOOSER); pub fn al_show_native_message_box(display: *mut ALLEGRO_DISPLAY, title: *const c_char, heading: *const c_char, text: *const c_char, buttons: *const c_char, flags: c_int) -> c_int; pub fn al_open_native_text_log(title: *const c_char, flags: c_int) -> *mut ALLEGRO_TEXTLOG; pub fn al_close_native_text_log(textlog: *mut ALLEGRO_TEXTLOG); pub fn al_append_native_text_log(textlog: *mut ALLEGRO_TEXTLOG, format: *const c_char, ...); pub fn al_get_native_text_log_event_source(textlog: *mut ALLEGRO_TEXTLOG) -> *mut ALLEGRO_EVENT_SOURCE; pub fn al_get_allegro_native_dialog_version() -> uint32_t; } }<|fim▁end|>
pub mod allegro_dialog { #![allow(non_camel_case_types)]
<|file_name|>re.4.py<|end_file_name|><|fim▁begin|>import re<|fim▁hole|>p = re.compile(r'(\w+) (\w+)(?P<sign>.*)', re.DOTALL) print re.DOTALL print "p.pattern:", p.pattern print "p.flags:", p.flags print "p.groups:", p.groups print "p.groupindex:", p.groupindex<|fim▁end|>
<|file_name|>exec_date_after_start_date_dep.py<|end_file_name|><|fim▁begin|># # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information<|fim▁hole|># with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.utils.session import provide_session class ExecDateAfterStartDateDep(BaseTIDep): NAME = "Execution Date" IGNOREABLE = True @provide_session def _get_dep_statuses(self, ti, session, dep_context): if ti.task.start_date and ti.execution_date < ti.task.start_date: yield self._failing_status( reason="The execution date is {0} but this is before the task's start " "date {1}.".format( ti.execution_date.isoformat(), ti.task.start_date.isoformat())) if (ti.task.dag and ti.task.dag.start_date and ti.execution_date < ti.task.dag.start_date): yield self._failing_status( reason="The execution date is {0} but this is before the task's " "DAG's start date {1}.".format( ti.execution_date.isoformat(), ti.task.dag.start_date.isoformat()))<|fim▁end|>
# regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! A set of utilities to help with common use cases that are not required to //! fully use the library. #[cfg(all(feature = "client", feature = "cache"))] mod argument_convert; mod colour; mod custom_message; mod message_builder; #[cfg(all(feature = "client", feature = "cache"))] pub use argument_convert::*; use reqwest::Url; pub use self::{ colour::{colours, Colour}, custom_message::CustomMessage, message_builder::{Content, ContentModifier, EmbedMessageBuilding, MessageBuilder}, }; pub type Color = Colour; #[cfg(feature = "cache")] use std::str::FromStr; use std::{ collections::HashMap, ffi::OsStr, fs::File, hash::{BuildHasher, Hash}, io::Read, path::Path, }; #[cfg(feature = "cache")] use crate::cache::Cache; use crate::internal::prelude::*; #[cfg(feature = "cache")] use crate::model::channel::Channel; #[cfg(feature = "cache")] use crate::model::id::{ChannelId, GuildId, RoleId, UserId}; use crate::model::{id::EmojiId, misc::EmojiIdentifier}; /// Converts a HashMap into a final [`serde_json::Map`] representation. pub fn hashmap_to_json_map<H, T>(map: HashMap<T, Value, H>) -> Map<String, Value> where H: BuildHasher, T: Eq + Hash + ToString, { let mut json_map = Map::new(); for (key, value) in map { json_map.insert(key.to_string(), value); } json_map } /// Retrieves the "code" part of an invite out of a URL. /// /// # Examples /// /// Two formats of [invite][`RichInvite`] codes are supported, both regardless of protocol prefix. /// Some examples: /// /// 1. Retrieving the code from the URL `"https://discord.gg/0cDvIgU2voY8RSYL"`: /// /// ```rust /// use serenity::utils; /// /// let url = "https://discord.gg/0cDvIgU2voY8RSYL"; /// /// assert_eq!(utils::parse_invite(url), "0cDvIgU2voY8RSYL"); /// ``` /// /// 2. Retrieving the code from the URL `"http://discord.com/invite/0cDvIgU2voY8RSYL"`: /// /// ```rust /// use serenity::utils; /// /// let url = "http://discord.com/invite/0cDvIgU2voY8RSYL"; /// /// assert_eq!(utils::parse_invite(url), "0cDvIgU2voY8RSYL"); /// ``` /// /// [`RichInvite`]: crate::model::invite::RichInvite pub fn parse_invite(code: &str) -> &str { let code = code.trim_start_matches("http://").trim_start_matches("https://"); let lower = code.to_lowercase(); if lower.starts_with("discord.gg/") { &code[11..] } else if lower.starts_with("discord.com/invite/") { &code[19..] } else { code } } /// Retrieves an Id from a user mention. /// /// If the mention is invalid, then [`None`] is returned. 
/// /// # Examples /// /// Retrieving an Id from a valid [`User`] mention: /// /// ```rust /// use serenity::utils::parse_username; /// /// // regular username mention /// assert_eq!(parse_username("<@114941315417899012>"), Some(114941315417899012)); /// /// // nickname mention /// assert_eq!(parse_username("<@!114941315417899012>"), Some(114941315417899012)); /// ``` /// /// Asserting that an invalid username or nickname mention returns [`None`]: /// /// ```rust /// use serenity::utils::parse_username; /// /// assert!(parse_username("<@1149413154aa17899012").is_none()); /// assert!(parse_username("<@!11494131541789a90b1c2").is_none()); /// ``` /// /// [`User`]: crate::model::user::User pub fn parse_username(mention: impl AsRef<str>) -> Option<u64> { let mention = mention.as_ref(); if mention.len() < 4 { return None; } if mention.starts_with("<@!") { let len = mention.len() - 1; mention[3..len].parse::<u64>().ok() } else if mention.starts_with("<@") { let len = mention.len() - 1; mention[2..len].parse::<u64>().ok() } else { None } } /// Retrieves an Id from a role mention. /// /// If the mention is invalid, then [`None`] is returned. /// /// # Examples /// /// Retrieving an Id from a valid [`Role`] mention: /// /// ```rust /// use serenity::utils::parse_role; /// /// assert_eq!(parse_role("<@&136107769680887808>"), Some(136107769680887808)); /// ``` /// /// Asserting that an invalid role mention returns [`None`]: /// /// ```rust /// use serenity::utils::parse_role; /// /// assert!(parse_role("<@&136107769680887808").is_none()); /// ``` /// /// [`Role`]: crate::model::guild::Role pub fn parse_role(mention: impl AsRef<str>) -> Option<u64> { let mention = mention.as_ref(); if mention.len() < 4 { return None; } if mention.starts_with("<@&") && mention.ends_with('>') { let len = mention.len() - 1; mention[3..len].parse::<u64>().ok() } else { None } } /// Retrieves an Id from a channel mention. /// /// If the channel mention is invalid, then [`None`] is returned. /// /// # Examples /// /// Retrieving an Id from a valid [`Channel`] mention: /// /// ```rust /// use serenity::utils::parse_channel; /// /// assert_eq!(parse_channel("<#81384788765712384>"), Some(81384788765712384)); /// ``` /// /// Asserting that an invalid channel mention returns [`None`]: /// /// ```rust /// use serenity::utils::parse_channel; /// /// assert!(parse_channel("<#!81384788765712384>").is_none()); /// assert!(parse_channel("<#81384788765712384").is_none()); /// ``` /// /// [`Channel`]: crate::model::channel::Channel pub fn parse_channel(mention: impl AsRef<str>) -> Option<u64> { let mention = mention.as_ref(); if mention.len() < 4 { return None; } if mention.starts_with("<#") && mention.ends_with('>') { let len = mention.len() - 1; mention[2..len].parse::<u64>().ok() } else { None } } /// Retrieve the ID number out of a channel, role, or user mention. /// /// If the mention is invalid, [`None`] is returned. 
/// /// # Examples /// /// ```rust /// use serenity::utils::parse_mention; /// /// assert_eq!(parse_mention("<@136510335967297536>"), Some(136510335967297536)); /// assert_eq!(parse_mention("<@&137235212097683456>"), Some(137235212097683456)); /// assert_eq!(parse_mention("<#137234234728251392>"), Some(137234234728251392)); /// ``` pub fn parse_mention(mention: impl AsRef<str>) -> Option<u64> { let mention = mention.as_ref(); if mention.starts_with("<@&") { parse_role(mention) } else if mention.starts_with("<@") || mention.starts_with("<@!") { parse_username(mention) } else if mention.starts_with("<#") { parse_channel(mention) } else { None } } /// Retrieves the animated state, name and Id from an emoji mention, in the form of an /// [`EmojiIdentifier`]. /// /// If the emoji usage is invalid, then [`None`] is returned. /// /// # Examples /// /// Ensure that a valid [`Emoji`] usage is correctly parsed: /// /// ```rust /// use serenity::model::id::{EmojiId, GuildId}; /// use serenity::model::misc::EmojiIdentifier; /// use serenity::utils::parse_emoji; /// /// let expected = EmojiIdentifier { /// animated: false, /// id: EmojiId(302516740095606785), /// name: "smugAnimeFace".to_string(), /// }; /// /// assert_eq!(parse_emoji("<:smugAnimeFace:302516740095606785>").unwrap(), expected); /// ``` /// /// Asserting that an invalid emoji usage returns [`None`]: /// /// ```rust /// use serenity::utils::parse_emoji; /// /// assert!(parse_emoji("<:smugAnimeFace:302516740095606785").is_none()); /// ``` /// /// [`Emoji`]: crate::model::guild::Emoji pub fn parse_emoji(mention: impl AsRef<str>) -> Option<EmojiIdentifier> { let mention = mention.as_ref(); let len = mention.len(); if !(6..=56).contains(&len) { return None; } if (mention.starts_with("<:") || mention.starts_with("<a:")) && mention.ends_with('>') { let mut name = String::default(); let mut id = String::default(); let animated = &mention[1..3] == "a:"; let start = if animated { 3 } else { 2 }; for (i, x) in mention[start..].chars().enumerate() { if x == ':' { let from = i + start + 1; for y in mention[from..].chars() { if y == '>' { break; } else { id.push(y); } } break; } else { name.push(x); } } match id.parse::<u64>() { Ok(x) => Some(EmojiIdentifier { animated, name, id: EmojiId(x), }), _ => None, } } else { None } } /// Reads an image from a path and encodes it into base64. /// /// This can be used for methods like [`EditProfile::avatar`]. /// /// # Examples /// /// Reads an image located at `./cat.png` into a base64-encoded string: /// /// ```rust,no_run /// use serenity::utils; /// /// let image = utils::read_image("./cat.png").expect("Failed to read image"); /// ``` /// /// # Errors /// /// Returns an [`Error::Io`] if the path does not exist. /// /// [`EditProfile::avatar`]: crate::builder::EditProfile::avatar /// [`Error::Io`]: crate::error::Error::Io #[inline] pub fn read_image<P: AsRef<Path>>(path: P) -> Result<String> { _read_image(path.as_ref()) } fn _read_image(path: &Path) -> Result<String> { let mut v = Vec::default(); let mut f = File::open(path)?; // errors here are intentionally ignored #[allow(clippy::let_underscore_must_use)] let _ = f.read_to_end(&mut v); let b64 = base64::encode(&v); let ext = if path.extension() == Some(OsStr::new("png")) { "png" } else { "jpg" }; Ok(format!("data:image/{};base64,{}", ext, b64)) } /// Turns a string into a vector of string arguments, splitting by spaces, but /// parsing content within quotes as one individual argument. 
/// /// # Examples /// /// Parsing two quoted commands: /// /// ```rust /// use serenity::utils::parse_quotes; /// /// let command = r#""this is the first" "this is the second""#; /// let expected = vec!["this is the first".to_string(), "this is the second".to_string()]; /// /// assert_eq!(parse_quotes(command), expected); /// ``` /// /// ```rust /// use serenity::utils::parse_quotes; /// /// let command = r#""this is a quoted command that doesn't have an ending quotation"#; /// let expected = /// vec!["this is a quoted command that doesn't have an ending quotation".to_string()]; /// /// assert_eq!(parse_quotes(command), expected); /// ``` pub fn parse_quotes(s: impl AsRef<str>) -> Vec<String> { let s = s.as_ref(); let mut args = vec![]; let mut in_string = false; let mut escaping = false; let mut current_str = String::default(); for x in s.chars() { if in_string { if x == '\\' && !escaping { escaping = true; } else if x == '"' && !escaping { if !current_str.is_empty() { args.push(current_str); } current_str = String::default(); in_string = false; } else { current_str.push(x); escaping = false; } } else if x == ' ' { if !current_str.is_empty() { args.push(current_str.clone()); } current_str = String::default(); } else if x == '"' { if !current_str.is_empty() { args.push(current_str.clone()); } in_string = true; current_str = String::default(); } else { current_str.push(x); } } if !current_str.is_empty() { args.push(current_str); } args } /// Parses the id and token from a webhook url. Expects a [`reqwest::Url`] object rather than a [`&str`]. /// /// # Examples /// /// ```rust /// use serenity::utils; /// /// let url_str = "https://discord.com/api/webhooks/245037420704169985/ig5AO-wdVWpCBtUUMxmgsWryqgsW3DChbKYOINftJ4DCrUbnkedoYZD0VOH1QLr-S3sV"; /// let url = url_str.parse().unwrap(); /// let (id, token) = utils::parse_webhook(&url).unwrap(); /// /// assert_eq!(id, 245037420704169985); /// assert_eq!(token, "ig5AO-wdVWpCBtUUMxmgsWryqgsW3DChbKYOINftJ4DCrUbnkedoYZD0VOH1QLr-S3sV"); /// ``` pub fn parse_webhook(url: &Url) -> Option<(u64, &str)> { let path = url.path().strip_prefix("/api/webhooks/")?; let split_idx = path.find('/')?; let webhook_id = &path[..split_idx]; let token = &path[split_idx + 1..]; if !["http", "https"].contains(&url.scheme()) || !["discord.com", "discordapp.com"].contains(&url.domain()?) || !(17..=20).contains(&webhook_id.len()) || !(60..=68).contains(&token.len()) { return None; } Some((webhook_id.parse().ok()?, token)) } /// Calculates the Id of the shard responsible for a guild, given its Id and /// total number of shards used. /// /// # Examples /// /// Retrieve the Id of the shard for a guild with Id `81384788765712384`, using /// 17 shards: /// /// ```rust /// use serenity::utils; /// /// assert_eq!(utils::shard_id(81384788765712384 as u64, 17), 7); /// ``` #[inline] pub fn shard_id(guild_id: impl Into<u64>, shard_count: u64) -> u64 { (guild_id.into() >> 22) % shard_count } /// Struct that allows to alter [`content_safe`]'s behaviour. #[cfg(feature = "cache")] #[derive(Clone, Debug)] pub struct ContentSafeOptions { clean_role: bool, clean_user: bool, clean_channel: bool, clean_here: bool, clean_everyone: bool, show_discriminator: bool, guild_reference: Option<GuildId>, } #[cfg(feature = "cache")] impl ContentSafeOptions { pub fn new() -> Self { ContentSafeOptions::default() } /// [`content_safe`] will replace role mentions (`<@&{id}>`) with its name /// prefixed with `@` (`@rolename`) or with `@deleted-role` if the /// identifier is invalid. 
pub fn clean_role(mut self, b: bool) -> Self { self.clean_role = b; self } /// If set to true, [`content_safe`] will replace user mentions /// (`<@!{id}>` or `<@{id}>`) with the user's name prefixed with `@` /// (`@username`) or with `@invalid-user` if the identifier is invalid. pub fn clean_user(mut self, b: bool) -> Self { self.clean_user = b; self } /// If set to true, [`content_safe`] will replace channel mentions /// (`<#{id}>`) with the channel's name prefixed with `#` /// (`#channelname`) or with `#deleted-channel` if the identifier is /// invalid. pub fn clean_channel(mut self, b: bool) -> Self { self.clean_channel = b; self } /// If set to true, if [`content_safe`] replaces a user mention it will /// add their four digit discriminator with a preceeding `#`, /// turning `@username` to `@username#discriminator`. pub fn show_discriminator(mut self, b: bool) -> Self { self.show_discriminator = b; self } /// If set, [`content_safe`] will replace a user mention with the user's /// display name in passed `guild`. pub fn display_as_member_from<G: Into<GuildId>>(mut self, guild: G) -> Self { self.guild_reference = Some(guild.into()); self } /// If set, [`content_safe`] will replace `@here` with a non-pinging /// alternative. pub fn clean_here(mut self, b: bool) -> Self { self.clean_here = b; self } /// If set, [`content_safe`] will replace `@everyone` with a non-pinging /// alternative. pub fn clean_everyone(mut self, b: bool) -> Self { self.clean_everyone = b; self } } #[cfg(feature = "cache")] impl Default for ContentSafeOptions { /// Instantiates with all options set to `true`. fn default() -> Self { ContentSafeOptions { clean_role: true, clean_user: true, clean_channel: true, clean_here: true, clean_everyone: true, show_discriminator: true, guild_reference: None, } } } #[cfg(feature = "cache")] #[inline] async fn clean_roles(cache: impl AsRef<Cache>, s: &mut String) { let mut progress = 0; while let Some(mut mention_start) = s[progress..].find("<@&") { mention_start += progress; if let Some(mut mention_end) = s[mention_start..].find('>') { mention_end += mention_start; mention_start += "<@&".len(); if let Ok(id) = RoleId::from_str(&s[mention_start..mention_end]) { let to_replace = format!("<@&{}>", &s[mention_start..mention_end]); *s = if let Some(role) = id.to_role_cached(&cache).await { s.replace(&to_replace, &format!("@{}", &role.name)) } else { s.replace(&to_replace, "@deleted-role") }; } else { let id = &s[mention_start..mention_end].to_string(); if !id.is_empty() && id.as_bytes().iter().all(u8::is_ascii_digit) { let to_replace = format!("<@&{}>", id); *s = s.replace(&to_replace, "@deleted-role"); } else { progress = mention_end; } } } else { break; } } } #[cfg(feature = "cache")]<|fim▁hole|> while let Some(mut mention_start) = s[progress..].find("<#") { mention_start += progress; if let Some(mut mention_end) = s[mention_start..].find('>') { mention_end += mention_start; mention_start += "<#".len(); if let Ok(id) = ChannelId::from_str(&s[mention_start..mention_end]) { let to_replace = format!("<#{}>", &s[mention_start..mention_end]); *s = if let Some(Channel::Guild(channel)) = id.to_channel_cached(&cache).await { let replacement = format!("#{}", &channel.name); s.replace(&to_replace, &replacement) } else { s.replace(&to_replace, "#deleted-channel") }; } else { let id = &s[mention_start..mention_end].to_string(); if !id.is_empty() && id.as_bytes().iter().all(u8::is_ascii_digit) { let to_replace = format!("<#{}>", id); *s = s.replace(&to_replace, "#deleted-channel"); } else { 
progress = mention_end; } } } else { break; } } } #[cfg(feature = "cache")] #[inline] async fn clean_users( cache: &impl AsRef<Cache>, s: &mut String, show_discriminator: bool, guild: Option<GuildId>, ) { let cache = cache.as_ref(); let mut progress = 0; while let Some(mut mention_start) = s[progress..].find("<@") { mention_start += progress; if let Some(mut mention_end) = s[mention_start..].find('>') { mention_end += mention_start; mention_start += "<@".len(); let has_exclamation = if s[mention_start..].as_bytes().get(0).map_or(false, |c| *c == b'!') { mention_start += "!".len(); true } else { false }; if let Ok(id) = UserId::from_str(&s[mention_start..mention_end]) { let replacement = if let Some(guild_id) = guild { if let Some(guild) = cache.guild(&guild_id).await { if let Some(member) = guild.members.get(&id) { if show_discriminator { format!("@{}", member.distinct()) } else { format!("@{}", member.display_name()) } } else { "@invalid-user".to_string() } } else { "@invalid-user".to_string() } } else if let Some(user) = cache.user(id).await { if show_discriminator { format!("@{}#{:04}", user.name, user.discriminator) } else { format!("@{}", user.name) } } else { "@invalid-user".to_string() }; let code_start = if has_exclamation { "<@!" } else { "<@" }; let to_replace = format!("{}{}>", code_start, &s[mention_start..mention_end]); *s = s.replace(&to_replace, &replacement) } else { let id = &s[mention_start..mention_end].to_string(); if !id.is_empty() && id.as_bytes().iter().all(u8::is_ascii_digit) { let code_start = if has_exclamation { "<@!" } else { "<@" }; let to_replace = format!("{}{}>", code_start, id); *s = s.replace(&to_replace, "@invalid-user"); } else { progress = mention_end; } } } else { break; } } } /// Transforms role, channel, user, `@everyone` and `@here` mentions /// into raw text by using the [`Cache`] only. /// /// [`ContentSafeOptions`] decides what kind of mentions should be filtered /// and how the raw-text will be displayed. /// /// # Examples /// /// Sanitise an `@everyone` mention. 
/// /// ```rust /// # use std::sync::Arc; /// # use serenity::client::Cache; /// # use tokio::sync::RwLock; /// # /// # async fn run() { /// # let cache = Cache::default(); /// use serenity::utils::{content_safe, ContentSafeOptions}; /// /// let with_mention = "@everyone"; /// let without_mention = content_safe(&cache, &with_mention, &ContentSafeOptions::default()).await; /// /// assert_eq!("@\u{200B}everyone".to_string(), without_mention); /// # } /// ``` /// /// [`Cache`]: crate::cache::Cache #[cfg(feature = "cache")] pub async fn content_safe( cache: impl AsRef<Cache>, s: impl AsRef<str>, options: &ContentSafeOptions, ) -> String { let mut content = s.as_ref().to_string(); if options.clean_role { clean_roles(&cache, &mut content).await; } if options.clean_channel { clean_channels(&cache, &mut content).await; } if options.clean_user { clean_users(&cache, &mut content, options.show_discriminator, options.guild_reference) .await; } if options.clean_here { content = content.replace("@here", "@\u{200B}here"); } if options.clean_everyone { content = content.replace("@everyone", "@\u{200B}everyone"); } content } #[cfg(test)] #[allow(clippy::unwrap_used, clippy::non_ascii_literal)] mod test { use super::*; #[cfg(feature = "cache")] use crate::cache::Cache; #[test] fn test_invite_parser() { assert_eq!(parse_invite("https://discord.gg/abc"), "abc"); assert_eq!(parse_invite("http://discord.gg/abc"), "abc"); assert_eq!(parse_invite("discord.gg/abc"), "abc"); assert_eq!(parse_invite("DISCORD.GG/ABC"), "ABC"); assert_eq!(parse_invite("https://discord.com/invite/abc"), "abc"); assert_eq!(parse_invite("http://discord.com/invite/abc"), "abc"); assert_eq!(parse_invite("discord.com/invite/abc"), "abc"); } #[test] fn test_username_parser() { assert_eq!(parse_username("<@12345>").unwrap(), 12_345); assert_eq!(parse_username("<@!12345>").unwrap(), 12_345); } #[test] fn role_parser() { assert_eq!(parse_role("<@&12345>").unwrap(), 12_345); } #[test] fn test_channel_parser() { assert_eq!(parse_channel("<#12345>").unwrap(), 12_345); } #[test] fn test_emoji_parser() { let emoji = parse_emoji("<:name:12345>").unwrap(); assert_eq!(emoji.name, "name"); assert_eq!(emoji.id, 12_345); } #[test] fn test_quote_parser() { let parsed = parse_quotes("a \"b c\" d\"e f\" g"); assert_eq!(parsed, ["a", "b c", "d", "e f", "g"]); } #[test] fn test_webhook_parser() { let url = "https://discord.com/api/webhooks/245037420704169985/ig5AO-wdVWpCBtUUMxmgsWryqgsW3DChbKYOINftJ4DCrUbnkedoYZD0VOH1QLr-S3sV".parse().unwrap(); let (id, token) = parse_webhook(&url).unwrap(); assert_eq!(id, 245037420704169985); assert_eq!(token, "ig5AO-wdVWpCBtUUMxmgsWryqgsW3DChbKYOINftJ4DCrUbnkedoYZD0VOH1QLr-S3sV"); } #[cfg(feature = "cache")] #[tokio::test] async fn test_content_safe() { use std::{collections::HashMap, sync::Arc}; use chrono::{DateTime, Utc}; use crate::model::{prelude::*, user::User, Permissions}; let user = User { id: UserId(100000000000000000), avatar: None, bot: false, discriminator: 0000, name: "Crab".to_string(), public_flags: None, banner: None, accent_colour: None, }; #[allow(deprecated)] let mut guild = Guild { afk_channel_id: None, afk_timeout: 0, application_id: None, channels: HashMap::new(), default_message_notifications: DefaultMessageNotificationLevel::All, emojis: HashMap::new(), explicit_content_filter: ExplicitContentFilter::None, features: Vec::new(), icon: None, id: GuildId(381880193251409931), joined_at: DateTime::parse_from_str( "1983 Apr 13 12:09:14.274 +0000", "%Y %b %d %H:%M:%S%.3f %z", ) .unwrap() 
.with_timezone(&Utc), large: false, member_count: 1, members: HashMap::new(), mfa_level: MfaLevel::None, name: "serenity".to_string(), owner_id: UserId(114941315417899012), presences: HashMap::new(), region: "Ferris Island".to_string(), roles: HashMap::new(), splash: None, discovery_splash: None, system_channel_id: None, system_channel_flags: Default::default(), rules_channel_id: None, public_updates_channel_id: None, verification_level: VerificationLevel::None, voice_states: HashMap::new(), description: None, premium_tier: PremiumTier::Tier0, premium_subscription_count: 0, banner: None, vanity_url_code: Some("bruhmoment1".to_string()), preferred_locale: "en-US".to_string(), welcome_screen: None, approximate_member_count: None, approximate_presence_count: None, nsfw: false, nsfw_level: NsfwLevel::Default, max_video_channel_users: None, max_presences: None, max_members: None, widget_enabled: Some(false), widget_channel_id: None, stage_instances: vec![], threads: vec![], }; let member = Member { deaf: false, guild_id: guild.id, joined_at: None, mute: false, nick: Some("Ferris".to_string()), roles: Vec::new(), user: user.clone(), pending: false, premium_since: None, #[cfg(feature = "unstable_discord_api")] permissions: None, avatar: None, communication_disabled_until: None, }; let role = Role { id: RoleId(333333333333333333), colour: Colour::ORANGE, guild_id: guild.id, hoist: true, managed: false, mentionable: true, name: "ferris-club-member".to_string(), permissions: Permissions::all(), position: 0, tags: RoleTags::default(), icon: None, unicode_emoji: None, }; let channel = GuildChannel { id: ChannelId(111880193700067777), bitrate: None, category_id: None, guild_id: guild.id, kind: ChannelType::Text, last_message_id: None, last_pin_timestamp: None, name: "general".to_string(), permission_overwrites: Vec::new(), position: 0, topic: None, user_limit: None, nsfw: false, slow_mode_rate: Some(0), rtc_region: None, video_quality_mode: None, message_count: None, member_count: None, thread_metadata: None, member: None, default_auto_archive_duration: None, }; let cache = Arc::new(Cache::default()); guild.members.insert(user.id, member.clone()); guild.roles.insert(role.id, role.clone()); cache.users.write().await.insert(user.id, user.clone()); cache.guilds.write().await.insert(guild.id, guild.clone()); cache.channels.write().await.insert(channel.id, channel.clone()); let with_user_mentions = "<@!100000000000000000> <@!000000000000000000> <@123> <@!123> \ <@!123123123123123123123> <@123> <@123123123123123123> <@!invalid> \ <@invalid> <@日本語 한국어$§)[/__#\\(/&2032$§#> \ <@!i)/==(<<>z/9080)> <@!1231invalid> <@invalid123> \ <@123invalid> <@> <@ "; let without_user_mentions = "@Crab#0000 @invalid-user @invalid-user @invalid-user \ @invalid-user @invalid-user @invalid-user <@!invalid> \ <@invalid> <@日本語 한국어$§)[/__#\\(/&2032$§#> \ <@!i)/==(<<>z/9080)> <@!1231invalid> <@invalid123> \ <@123invalid> <@> <@ "; // User mentions let options = ContentSafeOptions::default(); assert_eq!(without_user_mentions, content_safe(&cache, with_user_mentions, &options).await); let options = ContentSafeOptions::default(); assert_eq!( format!("@{}#{:04}", user.name, user.discriminator), content_safe(&cache, "<@!100000000000000000>", &options).await ); let options = ContentSafeOptions::default(); assert_eq!( format!("@{}#{:04}", user.name, user.discriminator), content_safe(&cache, "<@100000000000000000>", &options).await ); let options = options.show_discriminator(false); assert_eq!( format!("@{}", user.name), content_safe(&cache, 
"<@!100000000000000000>", &options).await ); let options = options.show_discriminator(false); assert_eq!( format!("@{}", user.name), content_safe(&cache, "<@100000000000000000>", &options).await ); let options = options.display_as_member_from(guild.id); assert_eq!( format!("@{}", member.nick.unwrap()), content_safe(&cache, "<@!100000000000000000>", &options).await ); let options = options.clean_user(false); assert_eq!(with_user_mentions, content_safe(&cache, with_user_mentions, &options).await); // Channel mentions let with_channel_mentions = "<#> <#deleted-channel> #deleted-channel <#0> \ #unsafe-club <#111880193700067777> <#ferrisferrisferris> \ <#000000000000000000>"; let without_channel_mentions = "<#> <#deleted-channel> #deleted-channel \ #deleted-channel #unsafe-club #general <#ferrisferrisferris> \ #deleted-channel"; assert_eq!( without_channel_mentions, content_safe(&cache, with_channel_mentions, &options).await ); let options = options.clean_channel(false); assert_eq!( with_channel_mentions, content_safe(&cache, with_channel_mentions, &options).await ); // Role mentions let with_role_mentions = "<@&> @deleted-role <@&9829> \ <@&333333333333333333> <@&000000000000000000>"; let without_role_mentions = "<@&> @deleted-role @deleted-role \ @ferris-club-member @deleted-role"; assert_eq!(without_role_mentions, content_safe(&cache, with_role_mentions, &options).await); let options = options.clean_role(false); assert_eq!(with_role_mentions, content_safe(&cache, with_role_mentions, &options).await); // Everyone mentions let with_everyone_mention = "@everyone"; let without_everyone_mention = "@\u{200B}everyone"; assert_eq!( without_everyone_mention, content_safe(&cache, with_everyone_mention, &options).await ); let options = options.clean_everyone(false); assert_eq!( with_everyone_mention, content_safe(&cache, with_everyone_mention, &options).await ); // Here mentions let with_here_mention = "@here"; let without_here_mention = "@\u{200B}here"; assert_eq!(without_here_mention, content_safe(&cache, with_here_mention, &options).await); let options = options.clean_here(false); assert_eq!(with_here_mention, content_safe(&cache, with_here_mention, &options).await); } }<|fim▁end|>
#[inline] async fn clean_channels(cache: &impl AsRef<Cache>, s: &mut String) { let mut progress = 0;
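A minimal usage sketch for the ContentSafeOptions builder and the async content_safe helper shown above (illustrative only: the sanitize wrapper, its arguments, and the particular option choices are assumptions, not part of the original file):

use serenity::client::Cache;
use serenity::utils::{content_safe, ContentSafeOptions};

// Hypothetical helper that strips pinging mentions from user-supplied text.
// The cache must already know the relevant guilds, users and channels;
// unknown ids fall back to the "@invalid-user" / "#deleted-channel" /
// "@deleted-role" placeholders produced by the functions above.
async fn sanitize(cache: &Cache, raw: &str) -> String {
    let options = ContentSafeOptions::default() // every clean_* flag starts as true
        .show_discriminator(false)              // "@username" instead of "@username#0000"
        .clean_channel(false);                  // leave channel mentions untouched
    content_safe(cache, raw, &options).await
}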
<|file_name|>210-Evt-EventListeners.cpp<|end_file_name|><|fim▁begin|>// 210-Evt-EventListeners.cpp // Contents: // 1. Printing of listener data // 2. My listener and registration // 3. Test cases // main() provided in 000-CatchMain.cpp // Let Catch provide the required interfaces: #define CATCH_CONFIG_EXTERNAL_INTERFACES #include <catch2/catch.hpp> #include <iostream> // ----------------------------------------------------------------------- // 1. Printing of listener data: // std::string ws(int const level) { return std::string( 2 * level, ' ' ); } template< typename T > std::ostream& operator<<( std::ostream& os, std::vector<T> const& v ) { os << "{ "; for ( const auto& x : v ) os << x << ", "; return os << "}"; } // struct SourceLineInfo { // char const* file; // std::size_t line; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::SourceLineInfo const& info ) { os << ws(level ) << title << ":\n" << ws(level+1) << "- file: " << info.file << "\n" << ws(level+1) << "- line: " << info.line << "\n"; } //struct MessageInfo { // std::string macroName; // std::string message; // SourceLineInfo lineInfo; // ResultWas::OfType type; // unsigned int sequence; //}; void print( std::ostream& os, int const level, Catch::MessageInfo const& info ) { os << ws(level+1) << "- macroName: '" << info.macroName << "'\n" << ws(level+1) << "- message '" << info.message << "'\n"; print( os,level+1 , "- lineInfo", info.lineInfo ); os << ws(level+1) << "- sequence " << info.sequence << "\n"; } void print( std::ostream& os, int const level, std::string const& title, std::vector<Catch::MessageInfo> const& v ) { os << ws(level ) << title << ":\n"; for ( const auto& x : v ) { os << ws(level+1) << "{\n"; print( os, level+2, x ); os << ws(level+1) << "}\n"; } // os << ws(level+1) << "\n"; } // struct TestRunInfo { // std::string name; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::TestRunInfo const& info ) { os << ws(level ) << title << ":\n" << ws(level+1) << "- name: " << info.name << "\n"; } // struct Counts { // std::size_t total() const; // bool allPassed() const; // bool allOk() const; // // std::size_t passed = 0; // std::size_t failed = 0; // std::size_t failedButOk = 0; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::Counts const& info ) { os << ws(level ) << title << ":\n" << ws(level+1) << "- total(): " << info.total() << "\n" << ws(level+1) << "- allPassed(): " << info.allPassed() << "\n" << ws(level+1) << "- allOk(): " << info.allOk() << "\n" << ws(level+1) << "- passed: " << info.passed << "\n" << ws(level+1) << "- failed: " << info.failed << "\n" << ws(level+1) << "- failedButOk: " << info.failedButOk << "\n"; } // struct Totals { // Counts assertions; // Counts testCases; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::Totals const& info ) { os << ws(level) << title << ":\n"; print( os, level+1, "- assertions", info.assertions ); print( os, level+1, "- testCases" , info.testCases ); } // struct TestRunStats { // TestRunInfo runInfo; // Totals totals; // bool aborting; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::TestRunStats const& info ) { os << ws(level) << title << ":\n"; print( os, level+1 , "- runInfo", info.runInfo ); print( os, level+1 , "- totals" , info.totals ); os << ws(level+1) << "- aborting: " << info.aborting << "\n"; } // struct TestCaseInfo { // enum SpecialProperties{ // None = 0, // IsHidden = 1 << 
1, // ShouldFail = 1 << 2, // MayFail = 1 << 3, // Throws = 1 << 4, // NonPortable = 1 << 5, // Benchmark = 1 << 6 // }; // // bool isHidden() const; // bool throws() const; // bool okToFail() const; // bool expectedToFail() const; // // std::string tagsAsString() const; // // std::string name; // std::string className; // std::string description; // std::vector<std::string> tags; // std::vector<std::string> lcaseTags; // SourceLineInfo lineInfo; // SpecialProperties properties; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::TestCaseInfo const& info ) { os << ws(level ) << title << ":\n" << ws(level+1) << "- isHidden(): " << info.isHidden() << "\n" << ws(level+1) << "- throws(): " << info.throws() << "\n" << ws(level+1) << "- okToFail(): " << info.okToFail() << "\n" << ws(level+1) << "- expectedToFail(): " << info.expectedToFail() << "\n" << ws(level+1) << "- tagsAsString(): '" << info.tagsAsString() << "'\n" << ws(level+1) << "- name: '" << info.name << "'\n" << ws(level+1) << "- className: '" << info.className << "'\n" << ws(level+1) << "- description: '" << info.description << "'\n" << ws(level+1) << "- tags: " << info.tags << "\n" << ws(level+1) << "- lcaseTags: " << info.lcaseTags << "\n"; print( os, level+1 , "- lineInfo", info.lineInfo ); os << ws(level+1) << "- properties (flags): 0x" << std::hex << info.properties << std::dec << "\n"; } // struct TestCaseStats { // TestCaseInfo testInfo; // Totals totals; // std::string stdOut; // std::string stdErr; // bool aborting; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::TestCaseStats const& info ) { os << ws(level ) << title << ":\n"; print( os, level+1 , "- testInfo", info.testInfo ); print( os, level+1 , "- totals" , info.totals ); os << ws(level+1) << "- stdOut: " << info.stdOut << "\n" << ws(level+1) << "- stdErr: " << info.stdErr << "\n" << ws(level+1) << "- aborting: " << info.aborting << "\n"; } // struct SectionInfo { // std::string name; // std::string description; // SourceLineInfo lineInfo; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::SectionInfo const& info ) { os << ws(level ) << title << ":\n" << ws(level+1) << "- name: " << info.name << "\n"; print( os, level+1 , "- lineInfo", info.lineInfo ); } // struct SectionStats { // SectionInfo sectionInfo; // Counts assertions; // double durationInSeconds; // bool missingAssertions; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::SectionStats const& info ) { os << ws(level ) << title << ":\n"; print( os, level+1 , "- sectionInfo", info.sectionInfo ); print( os, level+1 , "- assertions" , info.assertions ); os << ws(level+1) << "- durationInSeconds: " << info.durationInSeconds << "\n" << ws(level+1) << "- missingAssertions: " << info.missingAssertions << "\n"; } // struct AssertionInfo // { // StringRef macroName; // SourceLineInfo lineInfo; // StringRef capturedExpression; // ResultDisposition::Flags resultDisposition; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::AssertionInfo const& info ) { os << ws(level ) << title << ":\n" << ws(level+1) << "- macroName: '" << info.macroName << "'\n"; print( os, level+1 , "- lineInfo" , info.lineInfo ); os << ws(level+1) << "- capturedExpression: '" << info.capturedExpression << "'\n" << ws(level+1) << "- resultDisposition (flags): 0x" << std::hex << info.resultDisposition << std::dec << "\n"; } //struct AssertionResultData //{ // std::string 
reconstructExpression() const; // // std::string message; // mutable std::string reconstructedExpression; // LazyExpression lazyExpression; // ResultWas::OfType resultType; //}; void print( std::ostream& os, int const level, std::string const& title, Catch::AssertionResultData const& info ) { os << ws(level ) << title << ":\n" << ws(level+1) << "- reconstructExpression(): '" << info.reconstructExpression() << "'\n" << ws(level+1) << "- message: '" << info.message << "'\n" << ws(level+1) << "- lazyExpression: '" << "(info.lazyExpression)" << "'\n" << ws(level+1) << "- resultType: '" << info.resultType << "'\n"; } //class AssertionResult { // bool isOk() const; // bool succeeded() const; // ResultWas::OfType getResultType() const; // bool hasExpression() const; // bool hasMessage() const; // std::string getExpression() const; // std::string getExpressionInMacro() const; // bool hasExpandedExpression() const; // std::string getExpandedExpression() const; // std::string getMessage() const; // SourceLineInfo getSourceInfo() const; // std::string getTestMacroName() const; // // AssertionInfo m_info; // AssertionResultData m_resultData; //}; void print( std::ostream& os, int const level, std::string const& title, Catch::AssertionResult const& info ) { os << ws(level ) << title << ":\n" << ws(level+1) << "- isOk(): " << info.isOk() << "\n" << ws(level+1) << "- succeeded(): " << info.succeeded() << "\n" << ws(level+1) << "- getResultType(): " << info.getResultType() << "\n" << ws(level+1) << "- hasExpression(): " << info.hasExpression() << "\n" << ws(level+1) << "- hasMessage(): " << info.hasMessage() << "\n" << ws(level+1) << "- getExpression(): '" << info.getExpression() << "'\n" << ws(level+1) << "- getExpressionInMacro(): '" << info.getExpressionInMacro() << "'\n" << ws(level+1) << "- hasExpandedExpression(): " << info.hasExpandedExpression() << "\n" << ws(level+1) << "- getExpandedExpression(): " << info.getExpandedExpression() << "'\n" << ws(level+1) << "- getMessage(): '" << info.getMessage() << "'\n"; print( os, level+1 , "- getSourceInfo(): ", info.getSourceInfo() ); os << ws(level+1) << "- getTestMacroName(): '" << info.getTestMacroName() << "'\n"; // print( os, level+1 , "- *** m_info (AssertionInfo)", info.m_info ); // print( os, level+1 , "- *** m_resultData (AssertionResultData)", info.m_resultData ); } // struct AssertionStats { // AssertionResult assertionResult; // std::vector<MessageInfo> infoMessages; // Totals totals; // }; void print( std::ostream& os, int const level, std::string const& title, Catch::AssertionStats const& info ) { os << ws(level ) << title << ":\n"; print( os, level+1 , "- assertionResult", info.assertionResult ); print( os, level+1 , "- infoMessages", info.infoMessages ); print( os, level+1 , "- totals", info.totals ); } // ----------------------------------------------------------------------- // 2. 
My listener and registration: // char const * dashed_line = "--------------------------------------------------------------------------"; struct MyListener : Catch::TestEventListenerBase { using TestEventListenerBase::TestEventListenerBase; // inherit constructor // Get rid of Wweak-tables ~MyListener(); // The whole test run starting void testRunStarting( Catch::TestRunInfo const& testRunInfo ) override { std::cout << std::boolalpha << "\nEvent: testRunStarting:\n"; print( std::cout, 1, "- testRunInfo", testRunInfo ); } // The whole test run ending void testRunEnded( Catch::TestRunStats const& testRunStats ) override { std::cout << dashed_line << "\nEvent: testRunEnded:\n"; print( std::cout, 1, "- testRunStats", testRunStats ); } // A test is being skipped (because it is "hidden") void skipTest( Catch::TestCaseInfo const& testInfo ) override { std::cout << dashed_line << "\nEvent: skipTest:\n"; print( std::cout, 1, "- testInfo", testInfo ); } // Test cases starting void testCaseStarting( Catch::TestCaseInfo const& testInfo ) override { std::cout << dashed_line << "\nEvent: testCaseStarting:\n"; print( std::cout, 1, "- testInfo", testInfo ); } // Test cases ending void testCaseEnded( Catch::TestCaseStats const& testCaseStats ) override { std::cout << "\nEvent: testCaseEnded:\n"; print( std::cout, 1, "testCaseStats", testCaseStats ); } // Sections starting void sectionStarting( Catch::SectionInfo const& sectionInfo ) override { std::cout << "\nEvent: sectionStarting:\n"; print( std::cout, 1, "- sectionInfo", sectionInfo ); } // Sections ending void sectionEnded( Catch::SectionStats const& sectionStats ) override { std::cout << "\nEvent: sectionEnded:\n"; print( std::cout, 1, "- sectionStats", sectionStats ); } // Assertions before/ after void assertionStarting( Catch::AssertionInfo const& assertionInfo ) override { std::cout << "\nEvent: assertionStarting:\n"; print( std::cout, 1, "- assertionInfo", assertionInfo ); } bool assertionEnded( Catch::AssertionStats const& assertionStats ) override { std::cout << "\nEvent: assertionEnded:\n"; print( std::cout, 1, "- assertionStats", assertionStats ); return true; } }; CATCH_REGISTER_LISTENER( MyListener ) // Get rid of Wweak-tables MyListener::~MyListener() {} // ----------------------------------------------------------------------- // 3. Test cases: // TEST_CASE( "1: Hidden testcase", "[.hidden]" ) { } TEST_CASE( "2: Testcase with sections", "[tag-A][tag-B]" ) { int i = 42; REQUIRE( i == 42 ); SECTION("Section 1") { INFO("Section 1"); i = 7; SECTION("Section 1.1") { INFO("Section 1.1"); REQUIRE( i == 42 ); } } SECTION("Section 2") { INFO("Section 2"); REQUIRE( i == 42 ); } WARN("At end of test case"); } struct Fixture { int fortytwo() const { return 42; } }; TEST_CASE_METHOD( Fixture, "3: Testcase with class-based fixture", "[tag-C][tag-D]" ) { REQUIRE( fortytwo() == 42 ); }<|fim▁hole|> // Compile & run: // - g++ -std=c++11 -Wall -I$(CATCH_SINGLE_INCLUDE) -o 210-Evt-EventListeners 210-Evt-EventListeners.cpp 000-CatchMain.o && 210-Evt-EventListeners --success // - cl -EHsc -I%CATCH_SINGLE_INCLUDE% 210-Evt-EventListeners.cpp 000-CatchMain.obj && 210-Evt-EventListeners --success // Expected compact output (all assertions): // // prompt> 210-Evt-EventListeners --reporter compact --success // result omitted for brevity.<|fim▁end|>
<|file_name|>table.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! CSS table formatting contexts. #![deny(unsafe_code)] use app_units::Au; use block::{BlockFlow, CandidateBSizeIterator, ISizeAndMarginsComputer}; use block::{ISizeConstraintInput, ISizeConstraintSolution}; use context::LayoutContext; use display_list_builder::{BlockFlowDisplayListBuilding, BorderPaintingMode, DisplayListBuildState}; use euclid::Point2D; use flow; use flow::{BaseFlow, EarlyAbsolutePositionInfo, Flow, FlowClass, ImmutableFlowUtils, OpaqueFlow}; use flow_list::MutFlowListIterator; use fragment::{Fragment, FragmentBorderBoxIterator, Overflow}; use gfx_traits::print_tree::PrintTree; use layout_debug; use model::{IntrinsicISizes, IntrinsicISizesContribution, MaybeAuto}; use std::cmp; use std::fmt; use style::computed_values::{border_collapse, border_spacing, table_layout}; use style::context::SharedStyleContext; use style::logical_geometry::LogicalSize; use style::properties::ServoComputedValues; use style::servo::restyle_damage::{REFLOW, REFLOW_OUT_OF_FLOW}; use style::values::CSSFloat; use style::values::computed::LengthOrPercentageOrAuto; use table_row::{self, CellIntrinsicInlineSize, CollapsedBorder, CollapsedBorderProvenance}; use table_row::TableRowFlow; use table_wrapper::TableLayout; /// A table flow corresponded to the table's internal table fragment under a table wrapper flow. /// The properties `position`, `float`, and `margin-*` are used on the table wrapper fragment, /// not table fragment per CSS 2.1 § 10.5. #[derive(Serialize)] pub struct TableFlow { pub block_flow: BlockFlow, /// Information about the intrinsic inline-sizes of each column, computed bottom-up during /// intrinsic inline-size bubbling. pub column_intrinsic_inline_sizes: Vec<ColumnIntrinsicInlineSize>, /// Information about the actual inline sizes of each column, computed top-down during actual /// inline-size bubbling. pub column_computed_inline_sizes: Vec<ColumnComputedInlineSize>, /// The final width of the borders in the inline direction for each cell, computed by the /// entire table and pushed down into each row during inline size computation. pub collapsed_inline_direction_border_widths_for_table: Vec<Au>, /// The final width of the borders in the block direction for each cell, computed by the /// entire table and pushed down into each row during inline size computation. pub collapsed_block_direction_border_widths_for_table: Vec<Au>, /// Table-layout property pub table_layout: TableLayout, } impl TableFlow { pub fn from_fragment(fragment: Fragment) -> TableFlow { let mut block_flow = BlockFlow::from_fragment(fragment); let table_layout = if block_flow.fragment().style().get_table().table_layout == table_layout::T::fixed { TableLayout::Fixed } else { TableLayout::Auto }; TableFlow { block_flow: block_flow, column_intrinsic_inline_sizes: Vec::new(), column_computed_inline_sizes: Vec::new(), collapsed_inline_direction_border_widths_for_table: Vec::new(), collapsed_block_direction_border_widths_for_table: Vec::new(), table_layout: table_layout } } /// Update the corresponding value of `self_inline_sizes` if a value of `kid_inline_sizes` has /// a larger value than one of `self_inline_sizes`. Returns the minimum and preferred inline /// sizes. 
fn update_automatic_column_inline_sizes( parent_inline_sizes: &mut Vec<ColumnIntrinsicInlineSize>, child_cell_inline_sizes: &[CellIntrinsicInlineSize], surrounding_size: Au) -> IntrinsicISizes { let mut total_inline_sizes = IntrinsicISizes { minimum_inline_size: surrounding_size, preferred_inline_size: surrounding_size, }; let mut column_index = 0; let mut incoming_rowspan = vec![]; for child_cell_inline_size in child_cell_inline_sizes { // Skip any column occupied by a cell from a previous row. while column_index < incoming_rowspan.len() && incoming_rowspan[column_index] != 1 { if incoming_rowspan[column_index] > 1 { incoming_rowspan[column_index] -= 1; } column_index += 1; } for _ in 0..child_cell_inline_size.column_span { if column_index < parent_inline_sizes.len() { // We already have some intrinsic size information for this column. Merge it in // according to the rules specified in INTRINSIC § 4. let parent_sizes = &mut parent_inline_sizes[column_index]; if child_cell_inline_size.column_span > 1 { // TODO(pcwalton): Perform the recursive algorithm specified in INTRINSIC § // 4. For now we make this column contribute no width. } else { let column_size = &child_cell_inline_size.column_size; *parent_sizes = ColumnIntrinsicInlineSize { minimum_length: cmp::max(parent_sizes.minimum_length, column_size.minimum_length), percentage: parent_sizes.greatest_percentage(column_size), preferred: cmp::max(parent_sizes.preferred, column_size.preferred), constrained: parent_sizes.constrained || column_size.constrained, } } } else { // We discovered a new column. Initialize its data. debug_assert!(column_index == parent_inline_sizes.len()); if child_cell_inline_size.column_span > 1 { // TODO(pcwalton): Perform the recursive algorithm specified in INTRINSIC § // 4. For now we make this column contribute no width. parent_inline_sizes.push(ColumnIntrinsicInlineSize::new()) } else { parent_inline_sizes.push(child_cell_inline_size.column_size) } } total_inline_sizes.minimum_inline_size += parent_inline_sizes[column_index].minimum_length; total_inline_sizes.preferred_inline_size += parent_inline_sizes[column_index].preferred; // If this cell spans later rows, record its rowspan. if child_cell_inline_size.row_span > 1 { if incoming_rowspan.len() < column_index + 1 { incoming_rowspan.resize(column_index + 1, 0); } incoming_rowspan[column_index] = child_cell_inline_size.row_span; } column_index += 1 } } total_inline_sizes } /// Updates the minimum and preferred inline-size calculation for a single row. This is /// factored out into a separate function because we process children of rowgroups too. fn update_column_inline_sizes_for_row(row: &TableRowFlow, column_inline_sizes: &mut Vec<ColumnIntrinsicInlineSize>, computation: &mut IntrinsicISizesContribution, first_row: bool, table_layout: TableLayout, surrounding_inline_size: Au) { // Read column inline-sizes from the table-row, and assign inline-size=0 for the columns // not defined in the column group. // // FIXME: Need to read inline-sizes from either table-header-group OR the first table-row. match table_layout { TableLayout::Fixed => { // Fixed table layout only looks at the first row. // // FIXME(pcwalton): This is really inefficient. We should stop after the first row! 
if first_row { for cell_inline_size in &row.cell_intrinsic_inline_sizes { column_inline_sizes.push(cell_inline_size.column_size); } } } TableLayout::Auto => { computation.union_block(&TableFlow::update_automatic_column_inline_sizes( column_inline_sizes, &row.cell_intrinsic_inline_sizes, surrounding_inline_size)) } } } /// Returns the effective spacing per cell, taking the value of `border-collapse` into account. pub fn spacing(&self) -> border_spacing::T { let style = self.block_flow.fragment.style(); match style.get_inheritedtable().border_collapse { border_collapse::T::separate => style.get_inheritedtable().border_spacing, border_collapse::T::collapse => { border_spacing::T { horizontal: Au(0), vertical: Au(0), } } } } pub fn total_horizontal_spacing(&self) -> Au { let num_columns = self.column_intrinsic_inline_sizes.len(); if num_columns == 0 { return Au(0); } self.spacing().horizontal * (num_columns as i32 + 1) } } impl Flow for TableFlow { fn class(&self) -> FlowClass { FlowClass::Table } fn as_mut_table(&mut self) -> &mut TableFlow { self } fn as_table(&self) -> &TableFlow { self } fn as_mut_block(&mut self) -> &mut BlockFlow { &mut self.block_flow } fn as_block(&self) -> &BlockFlow { &self.block_flow } fn mark_as_root(&mut self) { self.block_flow.mark_as_root(); } /// The specified column inline-sizes are set from column group and the first row for the fixed /// table layout calculation. /// The maximum min/pref inline-sizes of each column are set from the rows for the automatic /// table layout calculation. fn bubble_inline_sizes(&mut self) { let _scope = layout_debug_scope!("table::bubble_inline_sizes {:x}", self.block_flow.base.debug_id()); // Get column inline sizes from colgroups for kid in self.block_flow.base.child_iter_mut().filter(|kid| kid.is_table_colgroup()) { for specified_inline_size in &kid.as_mut_table_colgroup().inline_sizes { self.column_intrinsic_inline_sizes.push(ColumnIntrinsicInlineSize { minimum_length: match *specified_inline_size { LengthOrPercentageOrAuto::Auto | LengthOrPercentageOrAuto::Calc(_) | LengthOrPercentageOrAuto::Percentage(_) => Au(0), LengthOrPercentageOrAuto::Length(length) => length, }, percentage: match *specified_inline_size { LengthOrPercentageOrAuto::Auto | LengthOrPercentageOrAuto::Calc(_) | LengthOrPercentageOrAuto::Length(_) => 0.0, LengthOrPercentageOrAuto::Percentage(percentage) => percentage.0, }, preferred: Au(0), constrained: false, }) } } self.collapsed_inline_direction_border_widths_for_table = Vec::new(); self.collapsed_block_direction_border_widths_for_table = vec![Au(0)]; let collapsing_borders = self.block_flow .fragment .style .get_inheritedtable() .border_collapse == border_collapse::T::collapse; let table_inline_collapsed_borders = if collapsing_borders { Some(TableInlineCollapsedBorders { start: CollapsedBorder::inline_start(&*self.block_flow.fragment.style, CollapsedBorderProvenance::FromTable), end: CollapsedBorder::inline_end(&*self.block_flow.fragment.style, CollapsedBorderProvenance::FromTable), }) } else { None }; let mut computation = IntrinsicISizesContribution::new(); let mut previous_collapsed_block_end_borders = PreviousBlockCollapsedBorders::FromTable(CollapsedBorder::block_start( &*self.block_flow.fragment.style, CollapsedBorderProvenance::FromTable)); let mut first_row = true; let (border_padding, _) = self.block_flow.fragment.surrounding_intrinsic_inline_size(); { let mut iterator = TableRowIterator::new(&mut self.block_flow.base).peekable(); while let Some(row) = iterator.next() { 
TableFlow::update_column_inline_sizes_for_row(<|fim▁hole|> row, &mut self.column_intrinsic_inline_sizes, &mut computation, first_row, self.table_layout, border_padding); if collapsing_borders { let next_index_and_sibling = iterator.peek(); let next_collapsed_borders_in_block_direction = match next_index_and_sibling { Some(next_sibling) => { NextBlockCollapsedBorders::FromNextRow( &next_sibling.as_table_row() .preliminary_collapsed_borders .block_start) } None => { NextBlockCollapsedBorders::FromTable( CollapsedBorder::block_end(&*self.block_flow.fragment.style, CollapsedBorderProvenance::FromTable)) } }; perform_border_collapse_for_row(row, table_inline_collapsed_borders.as_ref().unwrap(), previous_collapsed_block_end_borders, next_collapsed_borders_in_block_direction, &mut self.collapsed_inline_direction_border_widths_for_table, &mut self.collapsed_block_direction_border_widths_for_table); previous_collapsed_block_end_borders = PreviousBlockCollapsedBorders::FromPreviousRow( row.final_collapsed_borders.block_end.clone()); } first_row = false }; } let total_horizontal_spacing = self.total_horizontal_spacing(); let mut style_specified_intrinsic_inline_size = self.block_flow .fragment .style_specified_intrinsic_inline_size() .finish(); style_specified_intrinsic_inline_size.minimum_inline_size -= total_horizontal_spacing; style_specified_intrinsic_inline_size.preferred_inline_size -= total_horizontal_spacing; computation.union_block(&style_specified_intrinsic_inline_size); computation.surrounding_size += total_horizontal_spacing; self.block_flow.base.intrinsic_inline_sizes = computation.finish() } /// Recursively (top-down) determines the actual inline-size of child contexts and fragments. /// When called on this context, the context has had its inline-size set by the parent context. fn assign_inline_sizes(&mut self, layout_context: &LayoutContext) { let _scope = layout_debug_scope!("table::assign_inline_sizes {:x}", self.block_flow.base.debug_id()); debug!("assign_inline_sizes({}): assigning inline_size for flow", "table"); let shared_context = layout_context.shared_context(); // The position was set to the containing block by the flow's parent. // FIXME: The code for distributing column widths should really be placed under table_wrapper.rs. 
let containing_block_inline_size = self.block_flow.base.block_container_inline_size; let mut constrained_column_inline_sizes_indices = vec![]; let mut unspecified_inline_sizes_indices = vec![]; for (idx, column_inline_size) in self.column_intrinsic_inline_sizes.iter().enumerate() { if column_inline_size.constrained { constrained_column_inline_sizes_indices.push(idx); } else if column_inline_size.percentage == 0.0 { unspecified_inline_sizes_indices.push(idx); } } let inline_size_computer = InternalTable { border_collapse: self.block_flow.fragment.style.get_inheritedtable().border_collapse, }; inline_size_computer.compute_used_inline_size(&mut self.block_flow, shared_context, containing_block_inline_size); let inline_start_content_edge = self.block_flow.fragment.border_padding.inline_start; let inline_end_content_edge = self.block_flow.fragment.border_padding.inline_end; let padding_and_borders = self.block_flow.fragment.border_padding.inline_start_end(); let spacing_per_cell = self.spacing(); let total_horizontal_spacing = self.total_horizontal_spacing(); let content_inline_size = self.block_flow.fragment.border_box.size.inline - padding_and_borders - total_horizontal_spacing; let mut remaining_inline_size = content_inline_size; match self.table_layout { TableLayout::Fixed => { self.column_computed_inline_sizes.clear(); // https://drafts.csswg.org/css2/tables.html#fixed-table-layout for column_inline_size in &self.column_intrinsic_inline_sizes { if column_inline_size.constrained { self.column_computed_inline_sizes.push(ColumnComputedInlineSize { size: column_inline_size.minimum_length, }); remaining_inline_size -= column_inline_size.minimum_length; } else if column_inline_size.percentage != 0.0 { let size = remaining_inline_size.scale_by(column_inline_size.percentage); self.column_computed_inline_sizes.push(ColumnComputedInlineSize { size: size, }); remaining_inline_size -= size; } else { // Set the size to 0 now, distribute the remaining widths later self.column_computed_inline_sizes.push(ColumnComputedInlineSize { size: Au(0), }); } } // Distribute remaining content inline size if unspecified_inline_sizes_indices.len() > 0 { for &index in &unspecified_inline_sizes_indices { self.column_computed_inline_sizes[index].size = remaining_inline_size.scale_by(1.0 / unspecified_inline_sizes_indices.len() as f32); } } else { let total_minimum_size = self.column_intrinsic_inline_sizes .iter() .filter(|size| size.constrained) .map(|size| size.minimum_length.0 as f32) .sum::<f32>(); for &index in &constrained_column_inline_sizes_indices { self.column_computed_inline_sizes[index].size += remaining_inline_size.scale_by( self.column_computed_inline_sizes[index].size.0 as f32 / total_minimum_size); } } } _ => { // The table wrapper already computed the inline-sizes and propagated them down // to us. 
} } let column_computed_inline_sizes = &self.column_computed_inline_sizes; let collapsed_inline_direction_border_widths_for_table = &self.collapsed_inline_direction_border_widths_for_table; let mut collapsed_block_direction_border_widths_for_table = self.collapsed_block_direction_border_widths_for_table.iter().peekable(); let mut incoming_rowspan = vec![]; self.block_flow.propagate_assigned_inline_size_to_children(shared_context, inline_start_content_edge, inline_end_content_edge, content_inline_size, |child_flow, _child_index, _content_inline_size, writing_mode, _inline_start_margin_edge, _inline_end_margin_edge| { table_row::propagate_column_inline_sizes_to_child( child_flow, writing_mode, column_computed_inline_sizes, &spacing_per_cell, &mut incoming_rowspan); if child_flow.is_table_row() { let child_table_row = child_flow.as_mut_table_row(); child_table_row.populate_collapsed_border_spacing( collapsed_inline_direction_border_widths_for_table, &mut collapsed_block_direction_border_widths_for_table); } else if child_flow.is_table_rowgroup() { let child_table_rowgroup = child_flow.as_mut_table_rowgroup(); child_table_rowgroup.populate_collapsed_border_spacing( collapsed_inline_direction_border_widths_for_table, &mut collapsed_block_direction_border_widths_for_table); } }); } fn assign_block_size(&mut self, _: &LayoutContext) { debug!("assign_block_size: assigning block_size for table"); let vertical_spacing = self.spacing().vertical; self.block_flow.assign_block_size_for_table_like_flow(vertical_spacing) } fn compute_absolute_position(&mut self, layout_context: &LayoutContext) { self.block_flow.compute_absolute_position(layout_context) } fn generated_containing_block_size(&self, flow: OpaqueFlow) -> LogicalSize<Au> { self.block_flow.generated_containing_block_size(flow) } fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) { self.block_flow.update_late_computed_inline_position_if_necessary(inline_position) } fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) { self.block_flow.update_late_computed_block_position_if_necessary(block_position) } fn build_display_list(&mut self, state: &mut DisplayListBuildState) { let border_painting_mode = match self.block_flow .fragment .style .get_inheritedtable() .border_collapse { border_collapse::T::separate => BorderPaintingMode::Separate, border_collapse::T::collapse => BorderPaintingMode::Hidden, }; self.block_flow.build_display_list_for_block(state, border_painting_mode); } fn collect_stacking_contexts(&mut self, state: &mut DisplayListBuildState) { self.block_flow.collect_stacking_contexts(state); } fn repair_style(&mut self, new_style: &::StyleArc<ServoComputedValues>) { self.block_flow.repair_style(new_style) } fn compute_overflow(&self) -> Overflow { self.block_flow.compute_overflow() } fn iterate_through_fragment_border_boxes(&self, iterator: &mut FragmentBorderBoxIterator, level: i32, stacking_context_position: &Point2D<Au>) { self.block_flow.iterate_through_fragment_border_boxes(iterator, level, stacking_context_position) } fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) { self.block_flow.mutate_fragments(mutator) } fn print_extra_flow_children(&self, print_tree: &mut PrintTree) { self.block_flow.print_extra_flow_children(print_tree); } } impl fmt::Debug for TableFlow { /// Outputs a debugging string describing this table flow. 
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "TableFlow: {:?}", self.block_flow) } } /// Table, TableRowGroup, TableRow, TableCell types. /// Their inline-sizes are calculated in the same way and do not have margins. pub struct InternalTable { pub border_collapse: border_collapse::T, } impl ISizeAndMarginsComputer for InternalTable { fn compute_border_and_padding(&self, block: &mut BlockFlow, containing_block_inline_size: Au) { block.fragment.compute_border_and_padding(containing_block_inline_size, self.border_collapse) } /// Compute the used value of inline-size, taking care of min-inline-size and max-inline-size. /// /// CSS Section 10.4: Minimum and Maximum inline-sizes fn compute_used_inline_size(&self, block: &mut BlockFlow, shared_context: &SharedStyleContext, parent_flow_inline_size: Au) { let mut input = self.compute_inline_size_constraint_inputs(block, parent_flow_inline_size, shared_context); // Tables are always at least as wide as their minimum inline size. let minimum_inline_size = block.base.intrinsic_inline_sizes.minimum_inline_size - block.fragment.border_padding.inline_start_end(); input.available_inline_size = cmp::max(input.available_inline_size, minimum_inline_size); let solution = self.solve_inline_size_constraints(block, &input); self.set_inline_size_constraint_solutions(block, solution); } /// Solve the inline-size and margins constraints for this block flow. fn solve_inline_size_constraints(&self, _: &mut BlockFlow, input: &ISizeConstraintInput) -> ISizeConstraintSolution { ISizeConstraintSolution::new(input.available_inline_size, Au(0), Au(0)) } } /// Information about the intrinsic inline sizes of columns within a table. /// /// During table inline-size bubbling, we might need to store both a percentage constraint and a /// specific width constraint. For instance, one cell might say that it wants to be 100 pixels wide /// in the inline direction and another cell might say that it wants to take up 20% of the inline- /// size of the table. Now because we bubble up these constraints during the bubble-inline-sizes /// phase of layout, we don't know yet how wide the table is ultimately going to be in the inline /// direction. As we need to pick the maximum width of all cells for a column (in this case, the /// maximum of 100 pixels and 20% of the table), the preceding constraint means that we must /// potentially store both a specified width *and* a specified percentage, so that the inline-size /// assignment phase of layout will know which one to pick. #[derive(Clone, Serialize, Debug, Copy)] pub struct ColumnIntrinsicInlineSize { /// The preferred intrinsic inline size. pub preferred: Au, /// The largest specified size of this column as a length. pub minimum_length: Au, /// The largest specified size of this column as a percentage (`width` property). pub percentage: CSSFloat, /// Whether the column inline size is *constrained* per INTRINSIC § 4.1. pub constrained: bool, } impl ColumnIntrinsicInlineSize { /// Returns a newly-initialized `ColumnIntrinsicInlineSize` with all fields blank. pub fn new() -> ColumnIntrinsicInlineSize { ColumnIntrinsicInlineSize { preferred: Au(0), minimum_length: Au(0), percentage: 0.0, constrained: false, } } /// Returns the higher of the two percentages specified in `self` and `other`. pub fn greatest_percentage(&self, other: &ColumnIntrinsicInlineSize) -> CSSFloat { if self.percentage > other.percentage { self.percentage } else { other.percentage } } } /// The actual inline size for each column. 
/// /// TODO(pcwalton): There will probably be some `border-collapse`-related info in here too /// eventually. #[derive(Serialize, Clone, Copy, Debug)] pub struct ColumnComputedInlineSize { /// The computed size of this inline column. pub size: Au, } pub trait VecExt<T> { fn push_or_set(&mut self, index: usize, value: T) -> &mut T; fn get_mut_or_push(&mut self, index: usize, zero: T) -> &mut T; } impl<T> VecExt<T> for Vec<T> { fn push_or_set(&mut self, index: usize, value: T) -> &mut T { if index < self.len() { self[index] = value } else { debug_assert!(index == self.len()); self.push(value) } &mut self[index] } fn get_mut_or_push(&mut self, index: usize, zero: T) -> &mut T { if index >= self.len() { debug_assert!(index == self.len()); self.push(zero) } &mut self[index] } } /// Updates the border styles in the block direction for a single row. This function should /// only be called if border collapsing is on. It is factored out into a separate function /// because we process children of rowgroups too. fn perform_border_collapse_for_row(child_table_row: &mut TableRowFlow, table_inline_borders: &TableInlineCollapsedBorders, previous_block_borders: PreviousBlockCollapsedBorders, next_block_borders: NextBlockCollapsedBorders, inline_spacing: &mut Vec<Au>, block_spacing: &mut Vec<Au>) { // TODO mbrubeck: Take rowspan and colspan into account. let number_of_borders_inline_direction = child_table_row.preliminary_collapsed_borders.inline.len(); // Compute interior inline borders. for (i, this_inline_border) in child_table_row.preliminary_collapsed_borders .inline .iter_mut() .enumerate() { child_table_row.final_collapsed_borders.inline.push_or_set(i, *this_inline_border); if i == 0 { child_table_row.final_collapsed_borders.inline[i].combine(&table_inline_borders.start); } else if i + 1 == number_of_borders_inline_direction { child_table_row.final_collapsed_borders.inline[i].combine(&table_inline_borders.end); } let inline_spacing = inline_spacing.get_mut_or_push(i, Au(0)); *inline_spacing = cmp::max(*inline_spacing, child_table_row.final_collapsed_borders.inline[i].width) } // Compute block-start borders. let block_start_borders = &mut child_table_row.final_collapsed_borders.block_start; *block_start_borders = child_table_row.preliminary_collapsed_borders.block_start.clone(); for (i, this_border) in block_start_borders.iter_mut().enumerate() { match previous_block_borders { PreviousBlockCollapsedBorders::FromPreviousRow(ref previous_block_borders) => { if previous_block_borders.len() > i { this_border.combine(&previous_block_borders[i]); } } PreviousBlockCollapsedBorders::FromTable(table_border) => { this_border.combine(&table_border); } } } // Compute block-end borders. let next_block = &mut child_table_row.final_collapsed_borders.block_end; block_spacing.push(Au(0)); let block_spacing = block_spacing.last_mut().unwrap(); for (i, this_block_border) in child_table_row.preliminary_collapsed_borders .block_end .iter() .enumerate() { let next_block = next_block.push_or_set(i, *this_block_border); match next_block_borders { NextBlockCollapsedBorders::FromNextRow(next_block_borders) => { if next_block_borders.len() > i { next_block.combine(&next_block_borders[i]) } } NextBlockCollapsedBorders::FromTable(ref next_block_borders) => { next_block.combine(next_block_borders); } } *block_spacing = cmp::max(*block_spacing, next_block.width) } } /// Encapsulates functionality shared among all table-like flows: for now, tables and table /// rowgroups. pub trait TableLikeFlow { /// Lays out the rows of a table. 
fn assign_block_size_for_table_like_flow(&mut self, block_direction_spacing: Au); } impl TableLikeFlow for BlockFlow { fn assign_block_size_for_table_like_flow(&mut self, block_direction_spacing: Au) { debug_assert!(self.fragment.style.get_inheritedtable().border_collapse == border_collapse::T::separate || block_direction_spacing == Au(0)); if self.base.restyle_damage.contains(REFLOW) { // Our current border-box position. let block_start_border_padding = self.fragment.border_padding.block_start; let mut current_block_offset = block_start_border_padding; let mut has_rows = false; // At this point, `current_block_offset` is at the content edge of our box. Now iterate // over children. for kid in self.base.child_iter_mut() { // Account for spacing or collapsed borders. if kid.is_table_row() { has_rows = true; let child_table_row = kid.as_table_row(); current_block_offset = current_block_offset + match self.fragment.style.get_inheritedtable().border_collapse { border_collapse::T::separate => block_direction_spacing, border_collapse::T::collapse => { child_table_row.collapsed_border_spacing.block_start } } } // At this point, `current_block_offset` is at the border edge of the child. flow::mut_base(kid).position.start.b = current_block_offset; // Move past the child's border box. Do not use the `translate_including_floats` // function here because the child has already translated floats past its border // box. let kid_base = flow::mut_base(kid); current_block_offset = current_block_offset + kid_base.position.size.block; } // Compute any explicitly-specified block size. // Can't use `for` because we assign to // `candidate_block_size_iterator.candidate_value`. let mut block_size = current_block_offset - block_start_border_padding; let mut candidate_block_size_iterator = CandidateBSizeIterator::new( &self.fragment, self.base.block_container_explicit_block_size); while let Some(candidate_block_size) = candidate_block_size_iterator.next() { candidate_block_size_iterator.candidate_value = match candidate_block_size { MaybeAuto::Auto => block_size, MaybeAuto::Specified(value) => value }; } // Adjust `current_block_offset` as necessary to account for the explicitly-specified // block-size. block_size = candidate_block_size_iterator.candidate_value; let delta = block_size - (current_block_offset - block_start_border_padding); current_block_offset = current_block_offset + delta; // Take border, padding, and spacing into account. let block_end_offset = self.fragment.border_padding.block_end + if has_rows { block_direction_spacing } else { Au(0) }; current_block_offset = current_block_offset + block_end_offset; // Now that `current_block_offset` is at the block-end of the border box, compute the // final border box position. self.fragment.border_box.size.block = current_block_offset; self.fragment.border_box.start.b = Au(0); self.base.position.size.block = current_block_offset; // Write in the size of the relative containing block for children. (This information // is also needed to handle RTL.) for kid in self.base.child_iter_mut() { flow::mut_base(kid).early_absolute_position_info = EarlyAbsolutePositionInfo { relative_containing_block_size: self.fragment.content_box().size, relative_containing_block_mode: self.fragment.style().writing_mode, }; } } self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); } } /// Inline collapsed borders for the table itself. #[derive(Debug)] struct TableInlineCollapsedBorders { /// The table border at the start of the inline direction. 
start: CollapsedBorder, /// The table border at the end of the inline direction. end: CollapsedBorder, } enum PreviousBlockCollapsedBorders { FromPreviousRow(Vec<CollapsedBorder>), FromTable(CollapsedBorder), } enum NextBlockCollapsedBorders<'a> { FromNextRow(&'a [CollapsedBorder]), FromTable(CollapsedBorder), } /// Iterator over all the rows of a table struct TableRowIterator<'a> { kids: MutFlowListIterator<'a>, grandkids: Option<MutFlowListIterator<'a>>, } impl<'a> TableRowIterator<'a> { fn new(base: &'a mut BaseFlow) -> Self { TableRowIterator { kids: base.child_iter_mut(), grandkids: None, } } } impl<'a> Iterator for TableRowIterator<'a> { type Item = &'a mut TableRowFlow; #[inline] fn next(&mut self) -> Option<Self::Item> { // If we're inside a rowgroup, iterate through the rowgroup's children. if let Some(ref mut grandkids) = self.grandkids { if let Some(grandkid) = grandkids.next() { return Some(grandkid.as_mut_table_row()) } } // Otherwise, iterate through the table's children. self.grandkids = None; match self.kids.next() { Some(kid) => { if kid.is_table_rowgroup() { self.grandkids = Some(flow::mut_base(kid).child_iter_mut()); self.next() } else if kid.is_table_row() { Some(kid.as_mut_table_row()) } else { self.next() // Skip children that are not rows or rowgroups } } None => None } } }<|fim▁end|>
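For the single-column-span case handled above, update_automatic_column_inline_sizes folds each cell's measurement into the per-column ColumnIntrinsicInlineSize by keeping the largest minimum and preferred widths, the greatest percentage, and OR-ing the constrained flags. A self-contained sketch of that merge rule, with plain numbers standing in for Servo's Au and CSSFloat types (the ColSize struct and the example values are illustrative, not Servo code):

#[derive(Clone, Copy, Debug, Default)]
struct ColSize {
    minimum_length: i32, // app units in Servo; a plain integer here
    percentage: f32,
    preferred: i32,
    constrained: bool,
}

// Mirrors the per-column merge performed in update_automatic_column_inline_sizes.
fn merge(parent: ColSize, cell: ColSize) -> ColSize {
    ColSize {
        minimum_length: parent.minimum_length.max(cell.minimum_length),
        percentage: parent.percentage.max(cell.percentage),
        preferred: parent.preferred.max(cell.preferred),
        constrained: parent.constrained || cell.constrained,
    }
}

fn main() {
    let from_row_1 = ColSize { minimum_length: 100, percentage: 0.0, preferred: 150, constrained: true };
    let from_row_2 = ColSize { minimum_length: 80, percentage: 0.2, preferred: 200, constrained: false };
    let column = merge(from_row_1, from_row_2);
    // Widest minimum/preferred win, the greatest percentage survives, and the
    // column stays constrained because one contributing cell was constrained.
    assert_eq!((column.minimum_length, column.preferred), (100, 200));
    assert!(column.constrained && (column.percentage - 0.2).abs() < f32::EPSILON);
    println!("{:?}", column);
}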
<|file_name|>closure-reform-pretty.rs<|end_file_name|><|fim▁begin|>// Any copyright is dedicated to the Public Domain. // http://creativecommons.org/publicdomain/zero/1.0/ // pp-exact fn call_it(f: Box<FnMut(String) -> String>) { } fn call_this<F>(f: F) where F: Fn(&str) + Send { } fn call_that<F>(f: F) where F: for<'a> Fn(&'a isize, &'a isize) -> isize { } fn call_extern(f: fn() -> isize) { } fn call_abid_extern(f: extern "C" fn() -> isize) { }<|fim▁hole|>pub fn main() { }<|fim▁end|>
<|file_name|>copyfiles.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../definitions/vsts-task-lib.d.ts" /> import path = require('path'); import os = require('os'); import tl = require('vsts-task-lib/task'); function getCommonLocalPath(files: string[]): string { if (!files || files.length === 0) { return ""; } else if (files.length === 1) { return path.dirname(files[0]); } else { var root: string = files[0]; for (var index = 1; index < files.length; index++) { root = _getCommonLocalPath(root, files[index]); if (!root) { break; } } return root; } } function _getCommonLocalPath(path1: string, path2: string): string { var path1Depth = getFolderDepth(path1); var path2Depth = getFolderDepth(path2); var shortPath: string; var longPath: string; if (path1Depth >= path2Depth) { shortPath = path2; longPath = path1; } else { shortPath = path1; longPath = path2; } while (!isSubItem(longPath, shortPath)) { var parentPath = path.dirname(shortPath); if (path.normalize(parentPath) === path.normalize(shortPath)) { break; } shortPath = parentPath; } return shortPath; } function isSubItem(item: string, parent: string): boolean { item = path.normalize(item); parent = path.normalize(parent); return item.substring(0, parent.length) == parent && (item.length == parent.length || (parent.length > 0 && parent[parent.length - 1] === path.sep) || (item[parent.length] === path.sep)); } function getFolderDepth(fullPath: string): number { if (!fullPath) { return 0; } var current = path.normalize(fullPath); var parentPath = path.dirname(current); var count = 0; while (parentPath !== current) { ++count; current = parentPath; parentPath = path.dirname(current); } return count; } tl.setResourcePath(path.join( __dirname, 'task.json')); // contents is a multiline input containing glob patterns var contents: string[] = tl.getDelimitedInput('Contents', '\n', true); var sourceFolder: string = tl.getPathInput('SourceFolder', true, true); var targetFolder: string = tl.getPathInput('TargetFolder', true); var cleanTargetFolder: boolean = tl.getBoolInput('CleanTargetFolder', false); var overWrite: boolean = tl.getBoolInput('OverWrite', false); // not use common root for now. //var useCommonRoot: boolean = tl.getBoolInput('UseCommonRoot', false); var useCommonRoot: boolean = false; // include filter var includeContents: string[] = []; // exclude filter var excludeContents: string[] = []; for (var i: number = 0; i < contents.length; i++){ var pattern = contents[i].trim(); var negate: Boolean = false; var negateOffset: number = 0; for (var j = 0; j < pattern.length && pattern[j] === '!'; j++){ negate = !negate; negateOffset++; } if(negate){ tl.debug('exclude content pattern: ' + pattern); var realPattern = pattern.substring(0, negateOffset) + path.join(sourceFolder, pattern.substring(negateOffset)); excludeContents.push(realPattern); } else{ tl.debug('include content pattern: ' + pattern); var realPattern = path.join(sourceFolder, pattern); includeContents.push(realPattern); } } // enumerate all files var files: string[] = []; var allPaths: string[] = tl.find(sourceFolder, { followSymbolicLinks: true } as tl.FindOptions); var allFiles: string[] = []; // remove folder path for (var i: number = 0; i < allPaths.length; i++) { if (!tl.stats(allPaths[i]).isDirectory()) { allFiles.push(allPaths[i]); } } // if we only have exclude filters, we need add a include all filter, so we can have something to exclude. 
if(includeContents.length == 0 && excludeContents.length > 0) { includeContents.push('**'); } if (includeContents.length > 0 && allFiles.length > 0) { tl.debug("allFiles contains " + allFiles.length + " files"); // a map to eliminate duplicates var map = {}; // minimatch options var matchOptions = { matchBase: true }; if(os.type().match(/^Win/)) { matchOptions["nocase"] = true; } // apply include filter for (var i: number = 0; i < includeContents.length; i++) { var pattern = includeContents[i]; tl.debug('Include matching ' + pattern); // let minimatch do the actual filtering var matches: string[] = tl.match(allFiles, pattern, matchOptions); tl.debug('Include matched ' + matches.length + ' files');<|fim▁hole|> map[matchPath] = true; files.push(matchPath); } } } // apply exclude filter for (var i: number = 0; i < excludeContents.length; i++) { var pattern = excludeContents[i]; tl.debug('Exclude matching ' + pattern); // let minimatch do the actual filtering var matches: string[] = tl.match(files, pattern, matchOptions); tl.debug('Exclude matched ' + matches.length + ' files'); files = []; for (var j: number = 0; j < matches.length; j++) { var matchPath = matches[j]; files.push(matchPath); } } } else { tl.debug("Either includeContents or allFiles is empty"); } // copy the files to the target folder console.log(tl.loc('FoundNFiles', files.length)); if (files.length > 0) { // dump all files to debug trace. files.forEach((file: string) => { tl.debug('file:' + file + ' will be copied.'); }) // clean target folder if required if (cleanTargetFolder) { console.log(tl.loc('CleaningTargetFolder', targetFolder)); tl.rmRF(targetFolder); } // make sure the target folder exists tl.mkdirP(targetFolder); var commonRoot: string = ""; if (useCommonRoot) { var computeCommonRoot = getCommonLocalPath(files); if (!!computeCommonRoot) { commonRoot = computeCommonRoot; } else { commonRoot = sourceFolder; } tl.debug("There is a common root (" + commonRoot + ") for the files. Using the remaining path elements in target folder."); } try { var createdFolders = {}; files.forEach((file: string) => { var relativePath = file.substring(sourceFolder.length) .replace(/^\\/g, "") .replace(/^\//g, ""); if (useCommonRoot) { relativePath = file.substring(commonRoot.length) .replace(/^\\/g, "") .replace(/^\//g, ""); } var targetPath = path.join(targetFolder, relativePath); var targetDir = path.dirname(targetPath); if (!createdFolders[targetDir]) { tl.debug("Creating folder " + targetDir); tl.mkdirP(targetDir); createdFolders[targetDir] = true; } if (tl.exist(targetPath) && tl.stats(targetPath).isFile() && !overWrite) { console.log(tl.loc('FileAlreadyExistAt', file, targetPath)); } else { console.log(tl.loc('CopyingTo', file, targetPath)); tl.cp(file, targetPath, "-f"); } }); } catch (err) { tl.setResult(tl.TaskResult.Failed, err); } }<|fim▁end|>
for (var j: number = 0; j < matches.length; j++) { var matchPath = matches[j]; if (!map.hasOwnProperty(matchPath)) {
<|file_name|>closest.js<|end_file_name|><|fim▁begin|>export default function closest(n, arr) { let i let ndx let diff let best = Infinity let low = 0 let high = arr.length - 1 while (low <= high) { // eslint-disable-next-line no-bitwise i = low + ((high - low) >> 1) diff = arr[i] - n if (diff < 0) { low = i + 1 } else if (diff > 0) { high = i - 1 } diff = Math.abs(diff) if (diff < best) { best = diff ndx = i } if (arr[i] === n) break } <|fim▁hole|>}<|fim▁end|>
return arr[ndx]
<|file_name|>tagstore.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import re, logging, copy, json from threading import Lock class Tagger(object): def __init__(self, hosts_attr="fields.hosts", hosts_sep=":", tag_file="tags_jobs.safe"): self.tags_by_host = {} self.hosts_sep = str(hosts_sep) self.hosts_attr = str(hosts_attr) self.tag_file = tag_file self.lock = Lock() def add(self, measurement): hosts = measurement.get_attr(self.hosts_attr) if not hosts: logging.error("Measurement does not have the attribute with host list %s" % self.hosts_attr) return False hostlist = re.split(self.hosts_sep, hosts.strip("'").strip("\"")) tags = measurement.get_all_tags() if self.hosts_attr in tags: del tags[self.hosts_attr] self.lock.acquire() for h in hostlist: if h in self.tags_by_host: logging.info("Host %s already registered for key %s. Overwrite existing mapping" % (h, self.tags_by_host[h],)) logging.info("Add Host %s with tags %s" % (h, str(tags),)) self.tags_by_host[h] = tags self.lock.release() return True def delete(self, measurement): hosts = measurement.get_attr(self.hosts_attr) if not hosts: logging.error("Measurement does not have the attribute with host list %s" % self.hosts_attr) return False hostlist = re.split(self.hosts_sep, hosts.strip("'").strip("\"")) self.lock.acquire() for h in hostlist: if h in self.tags_by_host: logging.info("Delete Host %s with tags %s" % (h, str(self.tags_by_host[h]),)) del self.tags_by_host[h] self.lock.release() return True<|fim▁hole|>
 return copy.deepcopy(self.tags_by_host[host]) def get_all_tags(self): return self.tags_by_host def get_all_active_hosts(self): return sorted(self.tags_by_host.keys()) def host_active(self, host): return host in self.tags_by_host def store(self): f = open(self.tag_file, "w") f.write(json.dumps(self.tags_by_host, sort_keys=True, indent=4, separators=(',', ': '))) f.close() def restore(self): f = open(self.tag_file, "r") self.lock.acquire() self.tags_by_host = json.loads(f.read()) self.lock.release() f.close()<|fim▁end|>
def get_tags_by_host(self, host): if host not in self.tags_by_host: return {}