prompt (large_string, lengths 72-9.34k) | completion (large_string, lengths 0-7.61k) |
---|---|
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
<|fim▁hole|> :expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
    assert result[0] == 'Syncing Complete.'<|fim▁end|> | :id: 00fb0b04-0293-42c2-92fa-930c75acee89 |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
<|fim_middle|>
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | return entities.Organization().create() |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
<|fim_middle|>
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | return entities.Product(organization=module_org).create() |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
<|fim_middle|>
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
<|fim_middle|>
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | """Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.' |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
<|fim_middle|>
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | """Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results]) |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
<|fim_middle|>
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | """Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.' |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
<|fim_middle|>
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | """Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.' |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
<|fim_middle|>
<|fim▁end|> | """Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.' |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def <|fim_middle|>():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | module_org |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def <|fim_middle|>(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | module_custom_product |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def <|fim_middle|>():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | module_org_with_manifest |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def <|fim_middle|>(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | test_positive_sync_custom_repo |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def <|fim_middle|>(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | test_positive_sync_rh_repos |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def <|fim_middle|>(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | test_positive_sync_custom_ostree_repo |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def <|fim_middle|>(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def test_positive_sync_docker_via_sync_status(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | test_positive_sync_rh_ostree_repo |
<|file_name|>test_sync.py<|end_file_name|><|fim▁begin|>"""Test class for Custom Sync UI
:Requirement: Sync
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Repositories
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo import manifests
from robottelo.api.utils import enable_rhrepo_and_fetchid
from robottelo.constants import (
DISTRO_RHEL6, DISTRO_RHEL7,
DOCKER_REGISTRY_HUB,
DOCKER_UPSTREAM_NAME,
FAKE_1_YUM_REPO,
FEDORA27_OSTREE_REPO,
REPOS,
REPOSET,
REPO_TYPE,
PRDS,
)
from robottelo.decorators import (
fixture,
run_in_one_thread,
skip_if_not_set,
tier2,
upgrade,
skip_if_bug_open,
)
from robottelo.decorators.host import skip_if_os
from robottelo.products import (
RepositoryCollection,
RHELCloudFormsTools,
SatelliteCapsuleRepository,
)
@fixture(scope='module')
def module_org():
return entities.Organization().create()
@fixture(scope='module')
def module_custom_product(module_org):
return entities.Product(organization=module_org).create()
@fixture(scope='module')
def module_org_with_manifest():
org = entities.Organization().create()
manifests.upload_manifest_locked(org.id)
return org
@tier2
def test_positive_sync_custom_repo(session, module_custom_product):
"""Create Content Custom Sync with minimal input parameters
:id: 00fb0b04-0293-42c2-92fa-930c75acee89
:expectedresults: Sync procedure is successful
:CaseImportance: Critical
"""
repo = entities.Repository(
url=FAKE_1_YUM_REPO, product=module_custom_product).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_repos(session, module_org_with_manifest):
"""Create Content RedHat Sync with two repos.
:id: e30f6509-0b65-4bcc-a522-b4f3089d3911
:expectedresults: Sync procedure for RedHat Repos is successful
:CaseLevel: Integration
"""
repos = (
SatelliteCapsuleRepository(cdn=True),
RHELCloudFormsTools(cdn=True)
)
distros = [DISTRO_RHEL7, DISTRO_RHEL6]
repo_collections = [
RepositoryCollection(distro=distro, repositories=[repo])
for distro, repo in zip(distros, repos)
]
for repo_collection in repo_collections:
repo_collection.setup(module_org_with_manifest.id, synchronize=False)
repo_paths = [
(
repo.repo_data['product'],
repo.repo_data.get('releasever'),
repo.repo_data.get('arch'),
repo.repo_data['name'],
)
for repo in repos
]
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize(repo_paths)
assert len(results) == len(repo_paths)
assert all([result == 'Syncing Complete.' for result in results])
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@tier2
@upgrade
def test_positive_sync_custom_ostree_repo(session, module_custom_product):
"""Create custom ostree repository and sync it.
:id: e4119b9b-0356-4661-a3ec-e5807224f7d2
:expectedresults: ostree repo should be synced successfully
:CaseLevel: Integration
"""
repo = entities.Repository(
content_type='ostree',
url=FEDORA27_OSTREE_REPO,
product=module_custom_product,
unprotected=False,
).create()
with session:
results = session.sync_status.synchronize([
(module_custom_product.name, repo.name)])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@run_in_one_thread
@skip_if_bug_open('bugzilla', 1625783)
@skip_if_os('RHEL6')
@skip_if_not_set('fake_manifest')
@tier2
@upgrade
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
"""Sync CDN based ostree repository.
:id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5
:Steps:
1. Import a valid manifest
2. Enable the OStree repo and sync it
:expectedresults: ostree repo should be synced successfully from CDN
:CaseLevel: Integration
"""
enable_rhrepo_and_fetchid(
basearch=None,
org_id=module_org_with_manifest.id,
product=PRDS['rhah'],
repo=REPOS['rhaht']['name'],
reposet=REPOSET['rhaht'],
releasever=None,
)
with session:
session.organization.select(org_name=module_org_with_manifest.name)
results = session.sync_status.synchronize([
(PRDS['rhah'], REPOS['rhaht']['name'])])
assert len(results) == 1
assert results[0] == 'Syncing Complete.'
@tier2
@upgrade
def <|fim_middle|>(session, module_org):
"""Create custom docker repo and sync it via the sync status page.
:id: 00b700f4-7e52-48ed-98b2-e49b3be102f2
:expectedresults: Sync procedure for specific docker repository is
successful
:CaseLevel: Integration
"""
product = entities.Product(organization=module_org).create()
repo_name = gen_string('alphanumeric')
with session:
session.repository.create(
product.name,
{'name': repo_name,
'repo_type': REPO_TYPE['docker'],
'repo_content.upstream_url': DOCKER_REGISTRY_HUB,
'repo_content.upstream_repo_name': DOCKER_UPSTREAM_NAME}
)
assert session.repository.search(product.name, repo_name)[0]['Name'] == repo_name
result = session.sync_status.synchronize([(product.name, repo_name)])
assert result[0] == 'Syncing Complete.'
<|fim▁end|> | test_positive_sync_docker_via_sync_status |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from timeline import views
urlpatterns = [
url(r'^$', views.timelines, name='timelines'),
]<|fim▁end|> | from django.conf.urls import url
|
<|file_name|>ndvi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Version 0.1
# NDVI automated acquisition and calculation by Vladyslav Popov
# Using landsat-util, source: https://github.com/developmentseed/landsat-util
# Uses Amazon Web Services Public Dataset (Landsat 8)
# Script should be run every day
from os.path import join, abspath, dirname, exists
import os
import errno
import shutil
from tempfile import mkdtemp
import subprocess
import urllib2
import logging
import sys
import datetime
import re
from landsat.search import Search
from landsat.ndvi import NDVIWithManualColorMap
# Enable logging
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
# Get current date
current_date = datetime.datetime.now().date()
print 'Current date is:', current_date
# Let's subtract 1 day from the current date
sub_date = current_date - datetime.timedelta(days=1)
print 'Subtract date is:', sub_date
# Scene search by date and WRS-2 row and path
search = Search()
try:
search_results = search.search(paths_rows='177,025', start_date=sub_date, end_date=current_date)
search_string = str(search_results.get('results'))
search_list = re.compile('\w+').findall(search_string)
scene_id = str(search_list.pop(5))
print scene_id
l = len(scene_id)
print l
# Exit if we have no current image
except Exception:
raise SystemExit('Closing...')
# String concat for building Red Band URL for download
url_red = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B4.TIF'
# String concat for building NIR Band URL for download
url_nir = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B5.TIF'
<|fim▁hole|># Build filenames for band rasters and output NDVI file
red_file = scene_id + '_B4.TIF'
nir_file = scene_id + '_B5.TIF'
ndvi_file = scene_id + '_NDVI.TIF'
print 'Filenames built successfully'
# Create directories for later processing
base_dir = os.getcwd()
temp_folder = join(base_dir, "temp_folder")
scene_folder = join(temp_folder, scene_id)
if not os.path.exists(temp_folder):
os.makedirs(temp_folder)
if not os.path.exists(scene_folder):
os.makedirs(scene_folder)
# Download section for Band 4 using urllib2
file_name = url_red.split('/')[-1]
u = urllib2.urlopen(url_red)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# Download section for Band 5 using urllib2
file_name = url_nir.split('/')[-1]
u = urllib2.urlopen(url_nir)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# NDVI processing
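# NDVI = (NIR - Red) / (NIR + Red); for Landsat 8 these are band 5 (NIR)
# and band 4 (Red), the two rasters downloaded above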
# Let's create a new instance of the class
nd = NDVIWithManualColorMap(path=temp_folder+"/"+scene_id, dst_path=temp_folder)
# Start process
print nd.run()
# Create a virtual dataset for dividing the TIFF into tiles
subprocess.call(["gdalbuildvrt", "-a_srs", "EPSG:3857", "NDVImap.vrt", "temp_folder/"+scene_id+"/"+ndvi_file])
# Remove old tiles
shutil.rmtree("ndvi_tiles", ignore_errors=True)
# Start the process of dividing with the virtual dataset
subprocess.call(["./gdal2tilesp.py", "-w", "none", "-s EPSG:3857", "-p", "mercator", "-z 8-12", "--format=PNG", "--processes=4", "-o", "tms", "NDVImap.vrt", "ndvi_tiles"])
# Let's clean temporary files (bands, ndvi, vrt)
shutil.rmtree("temp_folder", ignore_errors=True)
os.remove("NDVImap.vrt")
print 'All temporary data was successfully removed'
# Close script
raise SystemExit('Closing...')<|fim▁end|> | |
<|file_name|>ndvi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Version 0.1
# NDVI automated acquisition and calculation by Vladyslav Popov
# Using landsat-util, source: https://github.com/developmentseed/landsat-util
# Uses Amazon Web Services Public Dataset (Landsat 8)
# Script should be run every day
from os.path import join, abspath, dirname, exists
import os
import errno
import shutil
from tempfile import mkdtemp
import subprocess
import urllib2
import logging
import sys
import datetime
import re
from landsat.search import Search
from landsat.ndvi import NDVIWithManualColorMap
# Enable logging
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
# Get current date
current_date = datetime.datetime.now().date()
print 'Current date is:', current_date
# Let's subtract 1 day from the current date
sub_date = current_date - datetime.timedelta(days=1)
print 'Subtract date is:', sub_date
# Scene search by date and WRS-2 row and path
search = Search()
try:
search_results = search.search(paths_rows='177,025', start_date=sub_date, end_date=current_date)
search_string = str(search_results.get('results'))
search_list = re.compile('\w+').findall(search_string)
scene_id = str(search_list.pop(5))
print scene_id
l = len(scene_id)
print l
# Exit if we have no current image
except Exception:
raise SystemExit('Closing...')
# String concat for building Red Band URL for download
url_red = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B4.TIF'
# String concat for building NIR Band URL for download
url_nir = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B5.TIF'
# Build filenames for band rasters and output NDVI file
red_file = scene_id + '_B4.TIF'
nir_file = scene_id + '_B5.TIF'
ndvi_file = scene_id + '_NDVI.TIF'
print 'Filenames built successfully'
# Create directories for later processing
base_dir = os.getcwd()
temp_folder = join(base_dir, "temp_folder")
scene_folder = join(temp_folder, scene_id)
if not os.path.exists(temp_folder):
<|fim_middle|>
if not os.path.exists(scene_folder):
os.makedirs(scene_folder)
# Download section for Band 4 using urllib2
file_name = url_red.split('/')[-1]
u = urllib2.urlopen(url_red)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# Download section for Band 5 using urllib2
file_name = url_nir.split('/')[-1]
u = urllib2.urlopen(url_nir)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# NDVI processing
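# NDVI = (NIR - Red) / (NIR + Red); for Landsat 8 these are band 5 (NIR)
# and band 4 (Red), the two rasters downloaded above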
# Let's create a new instance of the class
nd = NDVIWithManualColorMap(path=temp_folder+"/"+scene_id, dst_path=temp_folder)
# Start process
print nd.run()
# Create a virtual dataset for dividing the TIFF into tiles
subprocess.call(["gdalbuildvrt", "-a_srs", "EPSG:3857", "NDVImap.vrt", "temp_folder/"+scene_id+"/"+ndvi_file])
# Remove old tiles
shutil.rmtree("ndvi_tiles", ignore_errors=True)
# Start the process of dividing with the virtual dataset
subprocess.call(["./gdal2tilesp.py", "-w", "none", "-s EPSG:3857", "-p", "mercator", "-z 8-12", "--format=PNG", "--processes=4", "-o", "tms", "NDVImap.vrt", "ndvi_tiles"])
# Let's clean temporary files (bands, ndvi, vrt)
shutil.rmtree("temp_folder", ignore_errors=True)
os.remove("NDVImap.vrt")
print 'All temporary data was successfully removed'
# Close script
raise SystemExit('Closing...')
<|fim▁end|> | os.makedirs(temp_folder) |
<|file_name|>ndvi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Version 0.1
# NDVI automated acquisition and calculation by Vladyslav Popov
# Using landsat-util, source: https://github.com/developmentseed/landsat-util
# Uses Amazon Web Services Public Dataset (Landsat 8)
# Script should be run every day
from os.path import join, abspath, dirname, exists
import os
import errno
import shutil
from tempfile import mkdtemp
import subprocess
import urllib2
import logging
import sys
import datetime
import re
from landsat.search import Search
from landsat.ndvi import NDVIWithManualColorMap
# Enable logging
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
# Get current date
current_date = datetime.datetime.now().date()
print 'Current date is:', current_date
# Let's subtract 1 day from the current date
sub_date = current_date - datetime.timedelta(days=1)
print 'Subtract date is:', sub_date
# Scene search by date and WRS-2 row and path
search = Search()
try:
search_results = search.search(paths_rows='177,025', start_date=sub_date, end_date=current_date)
search_string = str(search_results.get('results'))
search_list = re.compile('\w+').findall(search_string)
scene_id = str(search_list.pop(5))
print scene_id
l = len(scene_id)
print l
# Exit if we have no current image
except Exception:
raise SystemExit('Closing...')
# String concat for building Red Band URL for download
url_red = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B4.TIF'
# String concat for building NIR Band URL for download
url_nir = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B5.TIF'
# Build filenames for band rasters and output NDVI file
red_file = scene_id + '_B4.TIF'
nir_file = scene_id + '_B5.TIF'
ndvi_file = scene_id + '_NDVI.TIF'
print 'Filenames built successfully'
# Create directories for later processing
base_dir = os.getcwd()
temp_folder = join(base_dir, "temp_folder")
scene_folder = join(temp_folder, scene_id)
if not os.path.exists(temp_folder):
os.makedirs(temp_folder)
if not os.path.exists(scene_folder):
<|fim_middle|>
# Download section for Band 4 using urllib2
file_name = url_red.split('/')[-1]
u = urllib2.urlopen(url_red)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# Download section for Band 5 using urllib2
file_name = url_nir.split('/')[-1]
u = urllib2.urlopen(url_nir)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# NDVI processing
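# NDVI = (NIR - Red) / (NIR + Red); for Landsat 8 these are band 5 (NIR)
# and band 4 (Red), the two rasters downloaded above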
# Let's create a new instance of the class
nd = NDVIWithManualColorMap(path=temp_folder+"/"+scene_id, dst_path=temp_folder)
# Start process
print nd.run()
# Create a virtual dataset for dividing the TIFF into tiles
subprocess.call(["gdalbuildvrt", "-a_srs", "EPSG:3857", "NDVImap.vrt", "temp_folder/"+scene_id+"/"+ndvi_file])
# Remove old tiles
shutil.rmtree("ndvi_tiles", ignore_errors=True)
# Start the process of dividing with the virtual dataset
subprocess.call(["./gdal2tilesp.py", "-w", "none", "-s EPSG:3857", "-p", "mercator", "-z 8-12", "--format=PNG", "--processes=4", "-o", "tms", "NDVImap.vrt", "ndvi_tiles"])
# Let's clean temporary files (bands, ndvi, vrt)
shutil.rmtree("temp_folder", ignore_errors=True)
os.remove("NDVImap.vrt")
print 'All temporary data was successfully removed'
# Close script
raise SystemExit('Closing...')
<|fim▁end|> | os.makedirs(scene_folder) |
<|file_name|>ndvi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Version 0.1
# NDVI automated acquisition and calculation by Vladyslav Popov
# Using landsat-util, source: https://github.com/developmentseed/landsat-util
# Uses Amazon Web Services Public Dataset (Landsat 8)
# Script should be run every day
from os.path import join, abspath, dirname, exists
import os
import errno
import shutil
from tempfile import mkdtemp
import subprocess
import urllib2
import logging
import sys
import datetime
import re
from landsat.search import Search
from landsat.ndvi import NDVIWithManualColorMap
# Enable logging
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
# Get current date
current_date = datetime.datetime.now().date()
print 'Current date is:', current_date
# Let's subtract 1 day from the current date
sub_date = current_date - datetime.timedelta(days=1)
print 'Subtract date is:', sub_date
# Scene search by date and WRS-2 row and path
search = Search()
try:
search_results = search.search(paths_rows='177,025', start_date=sub_date, end_date=current_date)
search_string = str(search_results.get('results'))
search_list = re.compile('\w+').findall(search_string)
scene_id = str(search_list.pop(5))
print scene_id
l = len(scene_id)
print l
# Exit if we have no current image
except Exception:
raise SystemExit('Closing...')
# String concat for building Red Band URL for download
url_red = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B4.TIF'
# String concat for building NIR Band URL for download
url_nir = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B5.TIF'
# Build filenames for band rasters and output NDVI file
red_file = scene_id + '_B4.TIF'
nir_file = scene_id + '_B5.TIF'
ndvi_file = scene_id + '_NDVI.TIF'
print 'Filenames built successfully'
# Create directories for later processing
base_dir = os.getcwd()
temp_folder = join(base_dir, "temp_folder")
scene_folder = join(temp_folder, scene_id)
if not os.path.exists(temp_folder):
os.makedirs(temp_folder)
if not os.path.exists(scene_folder):
os.makedirs(scene_folder)
# Download section for Band 4 using urllib2
file_name = url_red.split('/')[-1]
u = urllib2.urlopen(url_red)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
<|fim_middle|>
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# Download section for Band 5 using urllib2
file_name = url_nir.split('/')[-1]
u = urllib2.urlopen(url_nir)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# NDVI processing
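# NDVI = (NIR - Red) / (NIR + Red); for Landsat 8 these are band 5 (NIR)
# and band 4 (Red), the two rasters downloaded above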
# Let's create a new instance of the class
nd = NDVIWithManualColorMap(path=temp_folder+"/"+scene_id, dst_path=temp_folder)
# Start process
print nd.run()
# Create a virtual dataset for dividing the TIFF into tiles
subprocess.call(["gdalbuildvrt", "-a_srs", "EPSG:3857", "NDVImap.vrt", "temp_folder/"+scene_id+"/"+ndvi_file])
# Remove old tiles
shutil.rmtree("ndvi_tiles", ignore_errors=True)
# Start the process of dividing with the virtual dataset
subprocess.call(["./gdal2tilesp.py", "-w", "none", "-s EPSG:3857", "-p", "mercator", "-z 8-12", "--format=PNG", "--processes=4", "-o", "tms", "NDVImap.vrt", "ndvi_tiles"])
# Let's clean temporary files (bands, ndvi, vrt)
shutil.rmtree("temp_folder", ignore_errors=True)
os.remove("NDVImap.vrt")
print 'All temporary data was successfully removed'
# Close script
raise SystemExit('Closing...')
<|fim▁end|> | break |
<|file_name|>ndvi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Version 0.1
# NDVI automated acquisition and calculation by Vladyslav Popov
# Using landsat-util, source: https://github.com/developmentseed/landsat-util
# Uses Amazon Web Services Public Dataset (Landsat 8)
# Script should be run every day
from os.path import join, abspath, dirname, exists
import os
import errno
import shutil
from tempfile import mkdtemp
import subprocess
import urllib2
import logging
import sys
import datetime
import re
from landsat.search import Search
from landsat.ndvi import NDVIWithManualColorMap
# Enable logging
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
# Get current date
current_date = datetime.datetime.now().date()
print 'Current date is:', current_date
# Let's subtract 1 day from the current date
sub_date = current_date - datetime.timedelta(days=1)
print 'Subtract date is:', sub_date
# Scene search by date and WRS-2 row and path
search = Search()
try:
search_results = search.search(paths_rows='177,025', start_date=sub_date, end_date=current_date)
search_string = str(search_results.get('results'))
search_list = re.compile('\w+').findall(search_string)
scene_id = str(search_list.pop(5))
print scene_id
l = len(scene_id)
print l
# Exit if we have no current image
except Exception:
raise SystemExit('Closing...')
# String concat for building Red Band URL for download
url_red = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B4.TIF'
# String concat for building NIR Band URL for download
url_nir = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B5.TIF'
# Build filenames for band rasters and output NDVI file
red_file = scene_id + '_B4.TIF'
nir_file = scene_id + '_B5.TIF'
ndvi_file = scene_id + '_NDVI.TIF'
print 'Filenames built successfully'
# Create directories for later processing
base_dir = os.getcwd()
temp_folder = join(base_dir, "temp_folder")
scene_folder = join(temp_folder, scene_id)
if not os.path.exists(temp_folder):
os.makedirs(temp_folder)
if not os.path.exists(scene_folder):
os.makedirs(scene_folder)
# Download section for Band 4 using urllib2
file_name = url_red.split('/')[-1]
u = urllib2.urlopen(url_red)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
break
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# Download section for Band 5 using urllib2
file_name = url_nir.split('/')[-1]
u = urllib2.urlopen(url_nir)
f = open("temp_folder/"+scene_id+"/"+file_name, 'wb')
meta = u.info()
file_size = int(meta.getheaders("Content-Length")[0])
print "Downloading: %s Bytes: %s" % (file_name, file_size)
file_size_dl = 0
block_sz = 8192
while True:
buffer = u.read(block_sz)
if not buffer:
<|fim_middle|>
file_size_dl += len(buffer)
f.write(buffer)
status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size)
status = status + chr(8)*(len(status)+1)
print status,
f.close()
# NDVI processing
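# NDVI = (NIR - Red) / (NIR + Red); for Landsat 8 these are band 5 (NIR)
# and band 4 (Red), the two rasters downloaded above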
# Let's create a new instance of the class
nd = NDVIWithManualColorMap(path=temp_folder+"/"+scene_id, dst_path=temp_folder)
# Start process
print nd.run()
# Create a virtual dataset for dividing the TIFF into tiles
subprocess.call(["gdalbuildvrt", "-a_srs", "EPSG:3857", "NDVImap.vrt", "temp_folder/"+scene_id+"/"+ndvi_file])
# Remove old tiles
shutil.rmtree("ndvi_tiles", ignore_errors=True)
# Start the process of dividing with the virtual dataset
subprocess.call(["./gdal2tilesp.py", "-w", "none", "-s EPSG:3857", "-p", "mercator", "-z 8-12", "--format=PNG", "--processes=4", "-o", "tms", "NDVImap.vrt", "ndvi_tiles"])
# Let's clean temporary files (bands, ndvi, vrt)
shutil.rmtree("temp_folder", ignore_errors=True)
os.remove("NDVImap.vrt")
print 'All temporary data was successfully removed'
# Close script
raise SystemExit('Closing...')
<|fim▁end|> | break |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.shortcuts import redirect, render_to_response
from django.template.context import RequestContext
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# Just redirect / to /blog for now until I can
# come up with something to put on the homepage..
def to_blog(request):
return redirect('/blog/', permanent=False)
# Follow the BSD license and allow the source/binary to reproduce
# the license and copyright message
def sslicense(request):
slicense = """
Copyright (c) 2012-2013 Justin Crawford <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE
"""
ctx = {
'parts': {<|fim▁hole|> "html_title": "License",
"fragment": slicense.replace('\n', '<br>'),
},
}
return render_to_response('docs/docs.html', RequestContext(request, ctx))
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'StackSmash.views.home', name='home'),
# url(r'^StackSmash/', include('StackSmash.foo.urls')),
# TODO: Fix index and use something... Should identify subdomains somehow..
#url(r'^$', include('StackSmash.apps.blog.urls')),
url(r'^license/', sslicense, name='license'),
#url(r'^docs/', include('StackSmash.apps.docs.urls'), name='docs', app_name='docs'),
url(r'^blog/', include('StackSmash.apps.blog.urls', namespace='blog')),
url(r'^projects/', include('StackSmash.apps.projects.urls', namespace='projects')),
url(r'^upload/', include('StackSmash.apps.uploader.urls', namespace='upload')),
url(r'^$', to_blog, name='index'),
#url(r'^projects/', include('StackSmash.apps.projects.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls), name='admin'),
)<|fim▁end|> | "title": "License", |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.shortcuts import redirect, render_to_response
from django.template.context import RequestContext
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# Just redirect / to /blog for now until I can
# come up with something to put on the homepage..
def to_blog(request):
<|fim_middle|>
# Follow the BSD license and allow the source/binary to reproduce
# the license and copyright message
def sslicense(request):
slicense = """
Copyright (c) 2012-2013 Justin Crawford <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE
"""
ctx = {
'parts': {
"title": "License",
"html_title": "License",
"fragment": slicense.replace('\n', '<br>'),
},
}
return render_to_response('docs/docs.html', RequestContext(request, ctx))
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'StackSmash.views.home', name='home'),
# url(r'^StackSmash/', include('StackSmash.foo.urls')),
# TODO: Fix index and use something... Should identify subdomains somehow..
#url(r'^$', include('StackSmash.apps.blog.urls')),
url(r'^license/', sslicense, name='license'),
#url(r'^docs/', include('StackSmash.apps.docs.urls'), name='docs', app_name='docs'),
url(r'^blog/', include('StackSmash.apps.blog.urls', namespace='blog')),
url(r'^projects/', include('StackSmash.apps.projects.urls', namespace='projects')),
url(r'^upload/', include('StackSmash.apps.uploader.urls', namespace='upload')),
url(r'^$', to_blog, name='index'),
#url(r'^projects/', include('StackSmash.apps.projects.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls), name='admin'),
)
<|fim▁end|> | return redirect('/blog/', permanent=False) |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.shortcuts import redirect, render_to_response
from django.template.context import RequestContext
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# Just redirect / to /blog for now until I can
# come up with something to put on the homepage..
def to_blog(request):
return redirect('/blog/', permanent=False)
# Follow the BSD license and allow the source/binary to reproduce
# the license and copyright message
def sslicense(request):
<|fim_middle|>
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'StackSmash.views.home', name='home'),
# url(r'^StackSmash/', include('StackSmash.foo.urls')),
# TODO: Fix index and use something... Should identify subdomains somehow..
#url(r'^$', include('StackSmash.apps.blog.urls')),
url(r'^license/', sslicense, name='license'),
#url(r'^docs/', include('StackSmash.apps.docs.urls'), name='docs', app_name='docs'),
url(r'^blog/', include('StackSmash.apps.blog.urls', namespace='blog')),
url(r'^projects/', include('StackSmash.apps.projects.urls', namespace='projects')),
url(r'^upload/', include('StackSmash.apps.uploader.urls', namespace='upload')),
url(r'^$', to_blog, name='index'),
#url(r'^projects/', include('StackSmash.apps.projects.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls), name='admin'),
)
<|fim▁end|> | slicense = """
Copyright (c) 2012-2013 Justin Crawford <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE
"""
ctx = {
'parts': {
"title": "License",
"html_title": "License",
"fragment": slicense.replace('\n', '<br>'),
},
}
return render_to_response('docs/docs.html', RequestContext(request, ctx)) |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.shortcuts import redirect, render_to_response
from django.template.context import RequestContext
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# Just redirect / to /blog for now until I can
# come up with something to put on the homepage..
def <|fim_middle|>(request):
return redirect('/blog/', permanent=False)
# Follow the BSD license and allow the source/binary to reproduce
# the license and copyright message
def sslicense(request):
slicense = """
Copyright (c) 2012-2013 Justin Crawford <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE
"""
ctx = {
'parts': {
"title": "License",
"html_title": "License",
"fragment": slicense.replace('\n', '<br>'),
},
}
return render_to_response('docs/docs.html', RequestContext(request, ctx))
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'StackSmash.views.home', name='home'),
# url(r'^StackSmash/', include('StackSmash.foo.urls')),
# TODO: Fix index and use something... Should identify subdomains somehow..
#url(r'^$', include('StackSmash.apps.blog.urls')),
url(r'^license/', sslicense, name='license'),
#url(r'^docs/', include('StackSmash.apps.docs.urls'), name='docs', app_name='docs'),
url(r'^blog/', include('StackSmash.apps.blog.urls', namespace='blog')),
url(r'^projects/', include('StackSmash.apps.projects.urls', namespace='projects')),
url(r'^upload/', include('StackSmash.apps.uploader.urls', namespace='upload')),
url(r'^$', to_blog, name='index'),
#url(r'^projects/', include('StackSmash.apps.projects.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls), name='admin'),
)
<|fim▁end|> | to_blog |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.shortcuts import redirect, render_to_response
from django.template.context import RequestContext
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# Just redirect / to /blog for now until I can
# come up with something to put on the homepage..
def to_blog(request):
return redirect('/blog/', permanent=False)
# Follow the BSD license and allow the source/binary to reproduce
# the license and copyright message
def <|fim_middle|>(request):
slicense = """
Copyright (c) 2012-2013 Justin Crawford <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE
"""
ctx = {
'parts': {
"title": "License",
"html_title": "License",
"fragment": slicense.replace('\n', '<br>'),
},
}
return render_to_response('docs/docs.html', RequestContext(request, ctx))
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'StackSmash.views.home', name='home'),
# url(r'^StackSmash/', include('StackSmash.foo.urls')),
# TODO: Fix index and use something... Should identify subdomains somehow..
#url(r'^$', include('StackSmash.apps.blog.urls')),
url(r'^license/', sslicense, name='license'),
#url(r'^docs/', include('StackSmash.apps.docs.urls'), name='docs', app_name='docs'),
url(r'^blog/', include('StackSmash.apps.blog.urls', namespace='blog')),
url(r'^projects/', include('StackSmash.apps.projects.urls', namespace='projects')),
url(r'^upload/', include('StackSmash.apps.uploader.urls', namespace='upload')),
url(r'^$', to_blog, name='index'),
#url(r'^projects/', include('StackSmash.apps.projects.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls), name='admin'),
)
<|fim▁end|> | sslicense |
<|file_name|>one_D_helmholtz.py<|end_file_name|><|fim▁begin|>from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def min_wave(A, omega, x, tol=1e-5, maxiter=25):
'''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
maximum iters for minimization algorithm
<|fim▁hole|> for the matrix A, i.e., the omega shift that minimizes <Ac, c> / <c, c>,
for c = cosine((omega+shift)x)
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def obj_fcn(alpha):
c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1])
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt
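# Hypothetical usage sketch (the argument values are illustrative, not taken
# from this module): min_wave pairs with one_D_helmholtz defined just below,
#     data = one_D_helmholtz(64, omega=10.0)
#     shift = min_wave(data['A'], 10.0, data['vertices'][:, 0])
# and the returned shift is the correction that makes cos((omega+shift)*x) as
# close to a discrete null vector of A as the bounded search allows.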
def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
'''
parameters
----------
h {int}
Number of grid spacings for 1-D Helmholtz
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), imag(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
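    # Interpretation of the boundary handling above (my reading, not stated in
    # the source): doubling the first off-diagonal (reA.data[1] = -2.0) and
    # adding the purely imaginary diagonal term 2j*mesh_h*omega is the usual
    # ghost-point form of a first-order radiation condition at the left end,
    # and the 0.5 scaling of that row is what keeps complexA symmetric.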
# For this case, the CG (continuous Galerkin) case is the default elements and vertices
# because there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0])
elif nplane_waves == 2:
B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0])
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices}<|fim▁end|> | returns
-------
Applies minimization algorithm to find numerically lowest energy wavenumber |
<|file_name|>one_D_helmholtz.py<|end_file_name|><|fim▁begin|>from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def min_wave(A, omega, x, tol=1e-5, maxiter=25):
<|fim_middle|>
def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
'''
parameters
----------
h {int}
Number of grid spacings for 1-D Helmholtz
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), imag(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
# For this case, the CG (continuous Galerkin) case is the default elements and vertices
# because there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0])
elif nplane_waves == 2:
B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0])
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices}
<|fim▁end|> | '''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
maximum iters for minimization algorithm
returns
-------
Applies minimization algorithm to find numerically lowest energy wavenumber
for the matrix A, i.e., the omega shift that minimizes <Ac, c> / <c, c>,
for c = cosine((omega+shift)x)
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def obj_fcn(alpha):
c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1])
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt |
<|file_name|>one_D_helmholtz.py<|end_file_name|><|fim▁begin|>from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def min_wave(A, omega, x, tol=1e-5, maxiter=25):
'''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
maximum iters for minimization algorithm
returns
-------
Applies minimization algorithm to find numerically lowest energy wavenumber
for the matrix A, i.e., the omega shift that minimizes <Ac, c> / <c, c>,
for c = cosine((omega+shift)x)
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def obj_fcn(alpha):
<|fim_middle|>
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt
def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
'''
parameters
----------
h {int}
Number of grid spacings for 1-D Helmholtz
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), imag(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
# For this case, the CG (continuous Galerkin) case is the default elements and vertices
# because there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0])
elif nplane_waves == 2:
B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0])
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices}
<|fim▁end|> | c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1]) |
<|file_name|>one_D_helmholtz.py<|end_file_name|><|fim▁begin|>from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def min_wave(A, omega, x, tol=1e-5, maxiter=25):
'''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
maximum iters for minimization algorithm
returns
-------
Applies minimization algorithm to find numerically lowest energy wavenumber
for the matrix A, i.e., the omega shift that minimizes <Ac, c> / <c, c>,
for c = cosine((omega+shift)x)
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def obj_fcn(alpha):
c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1])
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt
def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
<|fim_middle|>
<|fim▁end|> | '''
parameters
----------
h {int}
Number of grid spacings for 1-D Helmholtz
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), imag(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
# For this case, the CG (continuous Galerkin) case is the default elements and vertices
# because there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0])
elif nplane_waves == 2:
B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0])
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices} |
<|file_name|>one_D_helmholtz.py<|end_file_name|><|fim▁begin|>from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def min_wave(A, omega, x, tol=1e-5, maxiter=25):
'''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
maximum iters for minimization algorithm
returns
-------
Applies minimization algorithm to find numerically lowest energy wavenumber
for the matrix A, i.e., the omega shift that minimizes <Ac, c> / <c, c>,
for c = cosine((omega+shift)x)
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def obj_fcn(alpha):
c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1])
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt
def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
'''
parameters
----------
h {int}
Number of grid spacings for 1-D Helmholtz
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), imag(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
# For this case, the CG (continuous Galerkin) case is the default elements and vertices
# because there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
<|fim_middle|>
elif nplane_waves == 2:
B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0])
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices}
<|fim▁end|> | B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0]) |
<|file_name|>one_D_helmholtz.py<|end_file_name|><|fim▁begin|>from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def min_wave(A, omega, x, tol=1e-5, maxiter=25):
'''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
maximum iters for minimization algorithm
returns
-------
Applies minimization algorithm to find numerically lowest energy wavenumber
for the matrix A, i.e., the omega shift that minimizes <Ac, c> / <c, c>,
for c = cosine((omega+shift)x)
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def obj_fcn(alpha):
c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1])
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt
def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
'''
parameters
----------
h {int}
Number of grid spacings for 1-D Helmholtz
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), imag(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
# For this case, the CG (continuous Galerkin) case is the default elements and vertices
# because there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0])
elif nplane_waves == 2:
<|fim_middle|>
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices}
<|fim▁end|> | B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0]) |
<|file_name|>one_D_helmholtz.py<|end_file_name|><|fim▁begin|>from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def <|fim_middle|>(A, omega, x, tol=1e-5, maxiter=25):
'''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
maximum iters for minimization algorithm
returns
-------
Applies minimization algorithm to find numerically lowest energy wavenumber
for the matrix A, i.e., the omega shift that minimizes <Ac, c> / <c, c>,
for c = cosine((omega+shift)x)
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def obj_fcn(alpha):
c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1])
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt
def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
'''
parameters
----------
h {int}
Number of grid spacings for 1-D Helmholtz
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), imag(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
# For this case, the CG (continuous Galerkin) case is the default elements and vertices
# because there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0])
elif nplane_waves == 2:
B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0])
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices}
<|fim▁end|> | min_wave |
<|file_name|>one_D_helmholtz.py<|end_file_name|><|fim▁begin|>from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def min_wave(A, omega, x, tol=1e-5, maxiter=25):
'''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
maximum iters for minimization algorithm
returns
-------
Applies minimization algorithm to find numerically lowest energy wavenumber
for the matrix A, i.e., the omega shift that minimizes <Ac, c> / <c, c>,
for c = cosine((omega+shift)x)
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def <|fim_middle|>(alpha):
c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1])
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt
def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
'''
parameters
----------
h {int}
Number of grid spacings for 1-D Helmholtz
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), imag(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
# For this case, the CG (continuous Galerkin) case is the default elements and vertices
# because there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0])
elif nplane_waves == 2:
B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0])
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices}
<|fim▁end|> | obj_fcn |
<|file_name|>one_D_helmholtz.py<|end_file_name|><|fim▁begin|>from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def min_wave(A, omega, x, tol=1e-5, maxiter=25):
'''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
maximum iters for minimization algorithm
returns
-------
Applies minimization algorithm to find numerically lowest energy wavenumber
for the matrix A, i.e., the omega shift that minimizes <Ac, c> / <c, c>,
for c = cosine((omega+shift)x)
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def obj_fcn(alpha):
c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1])
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt
def <|fim_middle|>(h, omega=1.0, nplane_waves=2):
'''
parameters
----------
h {int}
Number of grid spacings for 1-D Helmholtz
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
        2: B = [ real(exp(ikx)), imag(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
# For this case, the CG (continuous Galerkin) case is the default elements and vertices
# because there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0])
elif nplane_waves == 2:
B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0])
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices}
<|fim▁end|> | one_D_helmholtz |
<|file_name|>query.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version. See http://www.gnu.org/copyleft/gpl.html for
# the full text of the license.
# query.py: Perform a few varieties of queries
from __future__ import print_function
import time
import bugzilla
# public test instance of bugzilla.redhat.com. It's okay to make changes
URL = "partner-bugzilla.redhat.com"
bzapi = bugzilla.Bugzilla(URL)
# build_query is a helper function that handles some bugzilla version
# incompatibility issues. All it does is return a properly formatted
# dict(), and provide friendly parameter names. The param names map
# to those accepted by XMLRPC Bug.search:
# https://bugzilla.readthedocs.io/en/latest/api/core/v1/bug.html#search-bugs
query = bzapi.build_query(
product="Fedora",
component="python-bugzilla")
# Since 'query' is just a dict, you could set your own parameters too, like
# if your bugzilla had a custom field. This will set 'status' for example,
# but for common opts it's better to use build_query
query["status"] = "CLOSED"
# query() is what actually performs the query. it's a wrapper around Bug.search
t1 = time.time()
bugs = bzapi.query(query)
t2 = time.time()
print("Found %d bugs with our query" % len(bugs))
print("Query processing time: %s" % (t2 - t1))
# Depending on the size of your query, you can massively speed things up
# by telling bugzilla to only return the fields you care about, since a
# large chunk of the return time is transmitting the extra bug data. You
# tweak this with include_fields:
# https://wiki.mozilla.org/Bugzilla:BzAPI#Field_Control
# Bugzilla will only return those fields listed in include_fields.
query = bzapi.build_query(
product="Fedora",
component="python-bugzilla",
include_fields=["id", "summary"])
t1 = time.time()
bugs = bzapi.query(query)
t2 = time.time()
print("Quicker query processing time: %s" % (t2 - t1))
# bugzilla.redhat.com, and bugzilla >= 5.0 support queries using the same
# format as is used for 'advanced' search URLs via the Web UI. For example,
# I go to partner-bugzilla.redhat.com -> Search -> Advanced Search, select
# Classification=Fedora
# Product=Fedora
# Component=python-bugzilla
# Unselect all bug statuses (so, all status values)
# Under Custom Search
# Creation date -- is less than or equal to -- 2010-01-01
#
# Run that, copy the URL and bring it here, pass it to url_to_query to
# convert it to a dict(), and query as usual
query = bzapi.url_to_query("https://partner-bugzilla.redhat.com/"
"buglist.cgi?classification=Fedora&component=python-bugzilla&"
"f1=creation_ts&o1=lessthaneq&order=Importance&product=Fedora&"
"query_format=advanced&v1=2010-01-01")
query["include_fields"] = ["id", "summary"]
bugs = bzapi.query(query)
print("The URL query returned 22 bugs... "
"I know that without even checking because it shouldn't change!... "
"(count is %d)" % len(bugs))
# One note about querying... you can get subtly different results if
# you are not logged in. Depending on your bugzilla setup it may not matter,
# but if you are dealing with private bugs, check bzapi.logged_in setting<|fim▁hole|># an example usage<|fim▁end|> | # to ensure your cached credentials are up to date. See update.py for |
<|file_name|>typogrify.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify
<|fim▁hole|>class TypogrifyRSTProgram(RSTProgram):
def get_fragments(self):
if self._fragment_cache is not None:
return self._fragment_cache
with self.context.open_source_file() as f:
self.get_header(f)
rv = self.context.render_rst(f.read().decode('utf-8'))
rv['fragment'] = Markup(typogrify.typogrify(rv['fragment']))
self._fragment_cache = rv
return rv
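# The override above pushes the rendered reST fragment through typogrify's
# typographic filters (widon't, smart quotes and friends) and memoizes the
# result in _fragment_cache, so repeated fragment lookups skip both the reST
# render and the filtering.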
def setup(builder):
builder.programs['rst'] = TypogrifyRSTProgram<|fim▁end|> | |
<|file_name|>typogrify.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify
class TypogrifyRSTProgram(RSTProgram):
<|fim_middle|>
def setup(builder):
builder.programs['rst'] = TypogrifyRSTProgram<|fim▁end|> | def get_fragments(self):
if self._fragment_cache is not None:
return self._fragment_cache
with self.context.open_source_file() as f:
self.get_header(f)
rv = self.context.render_rst(f.read().decode('utf-8'))
rv['fragment'] = Markup(typogrify.typogrify(rv['fragment']))
self._fragment_cache = rv
return rv |
<|file_name|>typogrify.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify
class TypogrifyRSTProgram(RSTProgram):
def get_fragments(self):
<|fim_middle|>
def setup(builder):
builder.programs['rst'] = TypogrifyRSTProgram<|fim▁end|> | if self._fragment_cache is not None:
return self._fragment_cache
with self.context.open_source_file() as f:
self.get_header(f)
rv = self.context.render_rst(f.read().decode('utf-8'))
rv['fragment'] = Markup(typogrify.typogrify(rv['fragment']))
self._fragment_cache = rv
return rv |
<|file_name|>typogrify.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify
class TypogrifyRSTProgram(RSTProgram):
def get_fragments(self):
if self._fragment_cache is not None:
return self._fragment_cache
with self.context.open_source_file() as f:
self.get_header(f)
rv = self.context.render_rst(f.read().decode('utf-8'))
rv['fragment'] = Markup(typogrify.typogrify(rv['fragment']))
self._fragment_cache = rv
return rv
def setup(builder):
<|fim_middle|>
<|fim▁end|> | builder.programs['rst'] = TypogrifyRSTProgram |
<|file_name|>typogrify.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify
class TypogrifyRSTProgram(RSTProgram):
def get_fragments(self):
if self._fragment_cache is not None:
<|fim_middle|>
with self.context.open_source_file() as f:
self.get_header(f)
rv = self.context.render_rst(f.read().decode('utf-8'))
rv['fragment'] = Markup(typogrify.typogrify(rv['fragment']))
self._fragment_cache = rv
return rv
def setup(builder):
builder.programs['rst'] = TypogrifyRSTProgram<|fim▁end|> | return self._fragment_cache |
<|file_name|>typogrify.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify
class TypogrifyRSTProgram(RSTProgram):
def <|fim_middle|>(self):
if self._fragment_cache is not None:
return self._fragment_cache
with self.context.open_source_file() as f:
self.get_header(f)
rv = self.context.render_rst(f.read().decode('utf-8'))
rv['fragment'] = Markup(typogrify.typogrify(rv['fragment']))
self._fragment_cache = rv
return rv
def setup(builder):
builder.programs['rst'] = TypogrifyRSTProgram<|fim▁end|> | get_fragments |
<|file_name|>typogrify.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify
class TypogrifyRSTProgram(RSTProgram):
def get_fragments(self):
if self._fragment_cache is not None:
return self._fragment_cache
with self.context.open_source_file() as f:
self.get_header(f)
rv = self.context.render_rst(f.read().decode('utf-8'))
rv['fragment'] = Markup(typogrify.typogrify(rv['fragment']))
self._fragment_cache = rv
return rv
def <|fim_middle|>(builder):
builder.programs['rst'] = TypogrifyRSTProgram<|fim▁end|> | setup |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with<|fim▁hole|>
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
        )<|fim▁end|> | command substitution.
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
<|fim_middle|>
<|fim▁end|> | """A PrefixedCommandRunner allows you to run subprocess commands with
    command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
) |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
    command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
<|fim_middle|>
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
    command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
<|fim_middle|>
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir) |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
    command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
<|fim_middle|>
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs) |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
    command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
<|fim_middle|>
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path) |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
    command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
<|fim_middle|>
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | return os.path.exists(self.path(*parts)) |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
<|fim_middle|>
<|fim▁end|> | """Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
) |
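from_command_runner derives a child runner rooted in a subdirectory of an existing runner's prefix, reusing the parent's popen/makedirs injection points. A brief sketch with illustrative directory names:

# Derive a runner one directory level deeper (names are illustrative).
base = PrefixedCommandRunner('/tmp/hooks')
repo = PrefixedCommandRunner.from_command_runner(base, 'repo_abc123')
print(repo.prefix_dir)  # -> /tmp/hooks/repo_abc123/
# Because the classmethod forwards the parent's __popen/__makedirs,
# any doubles injected into `base` stay in effect for `repo`.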
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
<|fim_middle|>
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | self.__makedirs(self.prefix_dir) |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def <|fim_middle|>(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | __init__ |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def <|fim_middle|>(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | _create_path_if_not_exists |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def <|fim_middle|>(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | run |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def <|fim_middle|>(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | path |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def <|fim_middle|>(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def from_command_runner(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | exists |
<|file_name|>prefixed_command_runner.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
import os.path
import subprocess
from pre_commit.util import cmd_output
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
command substitution.
For instance:
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
will run ['/tmp/foo/foo.sh', 'bar', 'baz']
"""
def __init__(
self,
prefix_dir,
popen=subprocess.Popen,
makedirs=os.makedirs
):
self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
self.__popen = popen
self.__makedirs = makedirs
def _create_path_if_not_exists(self):
if not os.path.exists(self.prefix_dir):
self.__makedirs(self.prefix_dir)
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
replaced_cmd = [
part.replace('{prefix}', self.prefix_dir) for part in cmd
]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
path = os.path.join(self.prefix_dir, *parts)
return os.path.normpath(path)
def exists(self, *parts):
return os.path.exists(self.path(*parts))
@classmethod
def <|fim_middle|>(cls, command_runner, path_end):
"""Constructs a new command runner from an existing one by appending
`path_end` to the command runner's prefix directory.
"""
return cls(
command_runner.path(path_end),
popen=command_runner.__popen,
makedirs=command_runner.__makedirs,
)
<|fim▁end|> | from_command_runner |
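The popen and makedirs keyword arguments above exist so callers, typically tests, can inject doubles instead of spawning processes or touching the real filesystem. A small sketch, assuming the prefix path does not already exist:

# Inject a fake makedirs and observe the call (sketch, not a real test).
calls = []
runner = PrefixedCommandRunner('/tmp/doesnotexist', makedirs=calls.append)
runner._create_path_if_not_exists()
# If '/tmp/doesnotexist' is absent, the fake records the prefix:
assert calls == ['/tmp/doesnotexist/']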
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
<|fim▁hole|> def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]<|fim▁end|> | @staticmethod |
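The remaining rows repeat GOsa's ObjectBackendRegistry with different holes. A hedged usage sketch; the backend name 'LDAP' and the DN are examples, not guaranteed to exist in a given deployment:

# Usage sketch for ObjectBackendRegistry (backend name and DN are examples).
registry = ObjectBackendRegistry.getInstance()   # lazily created singleton
try:
    ObjectBackendRegistry.getBackend('LDAP')     # raises ValueError if absent
    # dn2uuid asks the backend first; with from_db_only=True it also falls
    # back to the ObjectIndex plugin when the backend cannot resolve the DN.
    uuid = registry.dn2uuid('LDAP', 'cn=admin,dc=example,dc=net',
                            from_db_only=True)
except ValueError:
    uuid = None  # BACKEND_NOT_FOUND: no such entry point was registered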
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
<|fim_middle|>
<|fim▁end|> | instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name] |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
<|fim_middle|>
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz() |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
<|fim_middle|>
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
<|fim_middle|>
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
<|fim_middle|>
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | return ObjectBackendRegistry.backends[backend].get_timestamps(dn) |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
<|fim_middle|>
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
<|fim_middle|>
<|fim▁end|> | if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name] |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def <|fim_middle|>(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | __init__ |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def <|fim_middle|>(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | dn2uuid |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def <|fim_middle|>(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | uuid2dn |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def <|fim_middle|>(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | get_timestamps |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def <|fim_middle|>():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | getInstance |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def <|fim_middle|>(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | getBackend |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
<|fim_middle|>
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid'] |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
<|fim_middle|>
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | self.__index = PluginRegistry.getInstance("ObjectIndex") |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
<|fim_middle|>
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | uuid = res[0]['_uuid'] |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
<|fim_middle|>
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn'] |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
<|fim_middle|>
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | self.__index = PluginRegistry.getInstance("ObjectIndex") |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
<|fim_middle|>
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | dn = res[0]['dn'] |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
<|fim_middle|>
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
raise ValueError(C.make_error("BACKEND_NOT_FOUND", name))
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | ObjectBackendRegistry.instance = ObjectBackendRegistry() |
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># This file is part of the GOsa framework.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
import pkg_resources
from gosa.common.components import PluginRegistry
from gosa.common.utils import N_
from gosa.common.error import GosaErrorHandler as C
# Register the errors handled by us
C.register_codes(dict(
BACKEND_NOT_FOUND=N_("Backend '%(topic)s' not found"),
))
class ObjectBackendRegistry(object):
instance = None
backends = {}
uuidAttr = "entryUUID"
__index = None
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz()
def dn2uuid(self, backend, dn, from_db_only=False):
uuid = ObjectBackendRegistry.backends[backend].dn2uuid(dn)
if uuid is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'dn': dn}, {'uuid': 1})
if len(res) == 1:
uuid = res[0]['_uuid']
return uuid
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn
def get_timestamps(self, backend, dn):
return ObjectBackendRegistry.backends[backend].get_timestamps(dn)
@staticmethod
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance
@staticmethod
def getBackend(name):
if name not in ObjectBackendRegistry.backends:
<|fim_middle|>
return ObjectBackendRegistry.backends[name]
<|fim▁end|> | raise ValueError(C.make_error("BACKEND_NOT_FOUND", name)) |
<|file_name|>teamqueue.py<|end_file_name|><|fim▁begin|>################################################################<|fim▁hole|># modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
################################################################
import sys
import time
import logging
import jobmanager.io.agents as agents
import jobmanager.io.jobs as jobs
from jobmanager.config import Config
from peewee import fn
from liveq.models import Agent, AgentGroup, Jobs
# Setup logger
logger = logging.getLogger("teamqueue")
def processTeamQueue():
"""
This should be called periodically to check and schedule jobs pending for the
particular team
"""
pass<|fim▁end|> | # LiveQ - An interactive volunteering computing batch system
# Copyright (C) 2013 Ioannis Charalampidis
#
# This program is free software; you can redistribute it and/or |
<|file_name|>teamqueue.py<|end_file_name|><|fim▁begin|>################################################################
# LiveQ - An interactive volunteering computing batch system
# Copyright (C) 2013 Ioannis Charalampidis
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
################################################################
import sys
import time
import logging
import jobmanager.io.agents as agents
import jobmanager.io.jobs as jobs
from jobmanager.config import Config
from peewee import fn
from liveq.models import Agent, AgentGroup, Jobs
# Setup logger
logger = logging.getLogger("teamqueue")
def processTeamQueue():
<|fim_middle|>
<|fim▁end|> | """
This should be called periodically to check and schedule jobs pending for the
particular team
"""
pass |
<|file_name|>teamqueue.py<|end_file_name|><|fim▁begin|>################################################################
# LiveQ - An interactive volunteering computing batch system
# Copyright (C) 2013 Ioannis Charalampidis
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
################################################################
import sys
import time
import logging
import jobmanager.io.agents as agents
import jobmanager.io.jobs as jobs
from jobmanager.config import Config
from peewee import fn
from liveq.models import Agent, AgentGroup, Jobs
# Setup logger
logger = logging.getLogger("teamqueue")
def <|fim_middle|>():
"""
This should be called periodically to check and schedule jobs pending for the
particular team
"""
pass
<|fim▁end|> | processTeamQueue |
<|file_name|>md5.py<|end_file_name|><|fim▁begin|>from __future__ import with_statement
from fabric.api import task
<|fim▁hole|>
@task
def md5():
"""
Check MD5 sums (unavailable, empty, with content)
"""
import hashlib
from fabric.api import cd, hide, run, settings
import fabtools
with cd('/tmp'):
run('touch f1')
assert fabtools.files.md5sum('f1') == hashlib.md5('').hexdigest()
run('echo -n hello > f2')
assert fabtools.files.md5sum('f2') == hashlib.md5('hello').hexdigest()
with settings(hide('warnings')):
assert fabtools.files.md5sum('doesnotexist') is None<|fim▁end|> | |
<|file_name|>md5.py<|end_file_name|><|fim▁begin|>from __future__ import with_statement
from fabric.api import task
@task
def md5():
<|fim_middle|>
<|fim▁end|> | """
Check MD5 sums (unavailable, empty, with content)
"""
import hashlib
from fabric.api import cd, hide, run, settings
import fabtools
with cd('/tmp'):
run('touch f1')
assert fabtools.files.md5sum('f1') == hashlib.md5('').hexdigest()
run('echo -n hello > f2')
assert fabtools.files.md5sum('f2') == hashlib.md5('hello').hexdigest()
with settings(hide('warnings')):
assert fabtools.files.md5sum('doesnotexist') is None |
<|file_name|>md5.py<|end_file_name|><|fim▁begin|>from __future__ import with_statement
from fabric.api import task
@task
def <|fim_middle|>():
"""
Check MD5 sums (unavailable, empty, with content)
"""
import hashlib
from fabric.api import cd, hide, run, settings
import fabtools
with cd('/tmp'):
run('touch f1')
assert fabtools.files.md5sum('f1') == hashlib.md5('').hexdigest()
run('echo -n hello > f2')
assert fabtools.files.md5sum('f2') == hashlib.md5('hello').hexdigest()
with settings(hide('warnings')):
assert fabtools.files.md5sum('doesnotexist') is None
<|fim▁end|> | md5 |
<|file_name|>test_model.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.4
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model tests
Unit tests for model utility methods.
"""
__author__ = '[email protected] (Joe Gregorio)'
import httplib2
import unittest
from apiclient.model import makepatch
TEST_CASES = [
# (message, original, modified, expected)
("Remove an item from an object",
{'a': 1, 'b': 2}, {'a': 1}, {'b': None}),
("Add an item to an object",
{'a': 1}, {'a': 1, 'b': 2}, {'b': 2}),
("No changes",
{'a': 1, 'b': 2}, {'a': 1, 'b': 2}, {}),
("Empty objects",
{}, {}, {}),
("Modify an item in an object",
{'a': 1, 'b': 2}, {'a': 1, 'b': 3}, {'b': 3}),
("Change an array",
{'a': 1, 'b': [2, 3]}, {'a': 1, 'b': [2]}, {'b': [2]}),
("Modify a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar', 'baz': 'qaax'}},
{'b': {'baz': 'qaax'}}),
("Modify a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qaax'}]},
{'b': [{'foo':'bar', 'baz': 'qaax'}]}),
("Remove item from a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar'}]},
{'b': [{'foo':'bar'}]}),
("Remove a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar'}},
{'b': {'baz': None}})
]
class TestPatch(unittest.TestCase):
def test_patch(self):
for (msg, orig, mod, expected_patch) in TEST_CASES:
self.assertEqual(expected_patch, makepatch(orig, mod), msg=msg)
<|fim▁hole|><|fim▁end|> | if __name__ == '__main__':
unittest.main() |
<|file_name|>test_model.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.4
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model tests
Unit tests for model utility methods.
"""
__author__ = '[email protected] (Joe Gregorio)'
import httplib2
import unittest
from apiclient.model import makepatch
TEST_CASES = [
# (message, original, modified, expected)
("Remove an item from an object",
{'a': 1, 'b': 2}, {'a': 1}, {'b': None}),
("Add an item to an object",
{'a': 1}, {'a': 1, 'b': 2}, {'b': 2}),
("No changes",
{'a': 1, 'b': 2}, {'a': 1, 'b': 2}, {}),
("Empty objects",
{}, {}, {}),
("Modify an item in an object",
{'a': 1, 'b': 2}, {'a': 1, 'b': 3}, {'b': 3}),
("Change an array",
{'a': 1, 'b': [2, 3]}, {'a': 1, 'b': [2]}, {'b': [2]}),
("Modify a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar', 'baz': 'qaax'}},
{'b': {'baz': 'qaax'}}),
("Modify a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qaax'}]},
{'b': [{'foo':'bar', 'baz': 'qaax'}]}),
("Remove item from a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar'}]},
{'b': [{'foo':'bar'}]}),
("Remove a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar'}},
{'b': {'baz': None}})
]
class TestPatch(unittest.TestCase):
<|fim_middle|>
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | def test_patch(self):
for (msg, orig, mod, expected_patch) in TEST_CASES:
self.assertEqual(expected_patch, makepatch(orig, mod), msg=msg) |
<|file_name|>test_model.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.4
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model tests
Unit tests for model utility methods.
"""
__author__ = '[email protected] (Joe Gregorio)'
import httplib2
import unittest
from apiclient.model import makepatch
TEST_CASES = [
# (message, original, modified, expected)
("Remove an item from an object",
{'a': 1, 'b': 2}, {'a': 1}, {'b': None}),
("Add an item to an object",
{'a': 1}, {'a': 1, 'b': 2}, {'b': 2}),
("No changes",
{'a': 1, 'b': 2}, {'a': 1, 'b': 2}, {}),
("Empty objects",
{}, {}, {}),
("Modify an item in an object",
{'a': 1, 'b': 2}, {'a': 1, 'b': 3}, {'b': 3}),
("Change an array",
{'a': 1, 'b': [2, 3]}, {'a': 1, 'b': [2]}, {'b': [2]}),
("Modify a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar', 'baz': 'qaax'}},
{'b': {'baz': 'qaax'}}),
("Modify a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qaax'}]},
{'b': [{'foo':'bar', 'baz': 'qaax'}]}),
("Remove item from a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar'}]},
{'b': [{'foo':'bar'}]}),
("Remove a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar'}},
{'b': {'baz': None}})
]
class TestPatch(unittest.TestCase):
def test_patch(self):
<|fim_middle|>
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | for (msg, orig, mod, expected_patch) in TEST_CASES:
self.assertEqual(expected_patch, makepatch(orig, mod), msg=msg) |
<|file_name|>test_model.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.4
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model tests
Unit tests for model utility methods.
"""
__author__ = '[email protected] (Joe Gregorio)'
import httplib2
import unittest
from apiclient.model import makepatch
TEST_CASES = [
# (message, original, modified, expected)
("Remove an item from an object",
{'a': 1, 'b': 2}, {'a': 1}, {'b': None}),
("Add an item to an object",
{'a': 1}, {'a': 1, 'b': 2}, {'b': 2}),
("No changes",
{'a': 1, 'b': 2}, {'a': 1, 'b': 2}, {}),
("Empty objects",
{}, {}, {}),
("Modify an item in an object",
{'a': 1, 'b': 2}, {'a': 1, 'b': 3}, {'b': 3}),
("Change an array",
{'a': 1, 'b': [2, 3]}, {'a': 1, 'b': [2]}, {'b': [2]}),
("Modify a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar', 'baz': 'qaax'}},
{'b': {'baz': 'qaax'}}),
("Modify a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qaax'}]},
{'b': [{'foo':'bar', 'baz': 'qaax'}]}),
("Remove item from a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar'}]},
{'b': [{'foo':'bar'}]}),
("Remove a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar'}},
{'b': {'baz': None}})
]
class TestPatch(unittest.TestCase):
def test_patch(self):
for (msg, orig, mod, expected_patch) in TEST_CASES:
self.assertEqual(expected_patch, makepatch(orig, mod), msg=msg)
if __name__ == '__main__':
<|fim_middle|>
<|fim▁end|> | unittest.main() |
<|file_name|>test_model.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.4
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model tests
Unit tests for model utility methods.
"""
__author__ = '[email protected] (Joe Gregorio)'
import httplib2
import unittest
from apiclient.model import makepatch
TEST_CASES = [
# (message, original, modified, expected)
("Remove an item from an object",
{'a': 1, 'b': 2}, {'a': 1}, {'b': None}),
("Add an item to an object",
{'a': 1}, {'a': 1, 'b': 2}, {'b': 2}),
("No changes",
{'a': 1, 'b': 2}, {'a': 1, 'b': 2}, {}),
("Empty objects",
{}, {}, {}),
("Modify an item in an object",
{'a': 1, 'b': 2}, {'a': 1, 'b': 3}, {'b': 3}),
("Change an array",
{'a': 1, 'b': [2, 3]}, {'a': 1, 'b': [2]}, {'b': [2]}),
("Modify a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar', 'baz': 'qaax'}},
{'b': {'baz': 'qaax'}}),
("Modify a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qaax'}]},
{'b': [{'foo':'bar', 'baz': 'qaax'}]}),
("Remove item from a nested array",
{'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
{'a': 1, 'b': [{'foo':'bar'}]},
{'b': [{'foo':'bar'}]}),
("Remove a nested item",
{'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
{'a': 1, 'b': {'foo':'bar'}},
{'b': {'baz': None}})
]
class TestPatch(unittest.TestCase):
def <|fim_middle|>(self):
for (msg, orig, mod, expected_patch) in TEST_CASES:
self.assertEqual(expected_patch, makepatch(orig, mod), msg=msg)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | test_patch |
<|file_name|>Zested.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#!/usr/bin/env python3
from zested.main import main
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>Zested.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
from zested.main import main
if __name__ == "__main__":
<|fim_middle|>
<|fim▁end|> | main() |
<|file_name|>navclass.py<|end_file_name|><|fim▁begin|>from django.template import Library, Node, resolve_variable, TemplateSyntaxError
from django.core.urlresolvers import reverse
register = Library()
@register.simple_tag
def active(request, pattern):<|fim▁hole|> return 'active'
return ''<|fim▁end|> | import re
if re.search(pattern, request.get_full_path()): |
<|file_name|>navclass.py<|end_file_name|><|fim▁begin|>from django.template import Library, Node, resolve_variable, TemplateSyntaxError
from django.core.urlresolvers import reverse
register = Library()
@register.simple_tag
def active(request, pattern):
<|fim_middle|>
<|fim▁end|> | import re
if re.search(pattern, request.get_full_path()):
return 'active'
return '' |
<|file_name|>navclass.py<|end_file_name|><|fim▁begin|>from django.template import Library, Node, resolve_variable, TemplateSyntaxError
from django.core.urlresolvers import reverse
register = Library()
@register.simple_tag
def active(request, pattern):
import re
if re.search(pattern, request.get_full_path()):
<|fim_middle|>
return ''<|fim▁end|> | return 'active' |
<|file_name|>navclass.py<|end_file_name|><|fim▁begin|>from django.template import Library, Node, resolve_variable, TemplateSyntaxError
from django.core.urlresolvers import reverse
register = Library()
@register.simple_tag
def <|fim_middle|>(request, pattern):
import re
if re.search(pattern, request.get_full_path()):
return 'active'
return ''<|fim▁end|> | active |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of CERN Document Server.
# Copyright (C) 2016, 2019 CERN.
#
# CERN Document Server is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# CERN Document Server is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CERN Document Server; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Pytest configuration.
Before running any of the tests you must have initialized the assets using
the ``script scripts/setup-assets.sh``.
"""
from __future__ import absolute_import, print_function
import os<|fim▁hole|>
import pkg_resources
import pytest
from cds_dojson.marc21 import marc21
from dojson.contrib.marc21.utils import create_record, split_blob
from elasticsearch.exceptions import RequestError
from invenio_db import db as _db
from invenio_indexer.api import RecordIndexer
from invenio_pidstore import current_pidstore
from invenio_records.api import Record
from invenio_search import current_search, current_search_client
from selenium import webdriver
from sqlalchemy_utils.functions import create_database, database_exists
from cds.factory import create_app
@pytest.yield_fixture(scope='session', autouse=True)
def base_app(request):
"""Flask application fixture."""
instance_path = tempfile.mkdtemp()
os.environ.update(
APP_INSTANCE_PATH=instance_path
)
app = create_app(
# CELERY_ALWAYS_EAGER=True,
# CELERY_CACHE_BACKEND="memory",
# CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
# CELERY_RESULT_BACKEND="cache",
SECRET_KEY="CHANGE_ME",
SECURITY_PASSWORD_SALT="CHANGE_ME",
MAIL_SUPPRESS_SEND=True,
TESTING=True,
)
with app.app_context():
yield app
# Teardown
shutil.rmtree(instance_path)
@pytest.yield_fixture(scope='session')
def db(base_app):
"""Initialize database."""
# Init
if not database_exists(str(_db.engine.url)):
create_database(str(_db.engine.url))
_db.create_all()
yield _db
# Teardown
_db.session.remove()
_db.drop_all()
@pytest.yield_fixture(scope='session')
def es(base_app):
"""Provide elasticsearch access."""
try:
list(current_search.create())
except RequestError:
list(current_search.delete())
list(current_search.create())
current_search_client.indices.refresh()
yield current_search_client
list(current_search.delete(ignore=[404]))
@pytest.yield_fixture(scope='session', autouse=True)
def app(base_app, es, db):
"""Application with ES and DB."""
yield base_app
def pytest_generate_tests(metafunc):
"""Override pytest's default test collection function.
For each test in this directory which uses the `env_browser` fixture,
the given test is called once for each value found in the
`E2E_WEBDRIVER_BROWSERS` environment variable.
"""
if 'env_browser' in metafunc.fixturenames:
# In Python 2.7 the fallback kwarg of os.environ.get is `failobj`,
# in 3.x it's `default`.
browsers = os.environ.get('E2E_WEBDRIVER_BROWSERS',
'Firefox').split()
metafunc.parametrize('env_browser', browsers, indirect=True)
@pytest.yield_fixture()
def env_browser(request):
"""Fixture for a webdriver instance of the browser."""
if request.param is None:
request.param = "Firefox"
# Create instance of webdriver.`request.param`()
browser = getattr(webdriver, request.param)()
yield browser
# Quit the webdriver instance
browser.quit()
@pytest.fixture()
def demo_records(app):
"""Create demo records."""
data_path = pkg_resources.resource_filename(
'cds.modules.fixtures', 'data/records.xml'
)
with open(data_path) as source:
indexer = RecordIndexer()
with _db.session.begin_nested():
for index, data in enumerate(split_blob(source.read()), start=1):
# create uuid
rec_uuid = uuid.uuid4()
# do translate
record = marc21.do(create_record(data))
# create PID
current_pidstore.minters['recid'](
rec_uuid, record
)
# create record
indexer.index(Record.create(record, id_=rec_uuid))
_db.session.commit()
return data_path<|fim▁end|> | import shutil
import tempfile
import uuid |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.