prompt (large_string, lengths 72–9.34k) | completion (large_string, lengths 0–7.61k) |
---|---|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def current_party(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
return parties[0]
@property
def current_election(self):
return self.elections.filter(active=True)[0]
@property
def current_constituency(self):
<|fim_middle|>
def __unicode__(self):
return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election)
<|fim▁end|> | return self.constituencies.filter(
personconstituencies__election=self.current_election)[0] |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def current_party(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
return parties[0]
@property
def current_election(self):
return self.elections.filter(active=True)[0]
@property
def current_constituency(self):
return self.constituencies.filter(
personconstituencies__election=self.current_election)[0]
def __unicode__(self):
<|fim_middle|>
class PartyMemberships(models.Model):
person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election)
<|fim▁end|> | return "%s (%s)" % (self.name, self.remote_id) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def current_party(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
return parties[0]
@property
def current_election(self):
return self.elections.filter(active=True)[0]
@property
def current_constituency(self):
return self.constituencies.filter(
personconstituencies__election=self.current_election)[0]
def __unicode__(self):
return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
<|fim_middle|>
class PersonConstituencies(models.Model):
person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election)
<|fim▁end|> | person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def current_party(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
return parties[0]
@property
def current_election(self):
return self.elections.filter(active=True)[0]
@property
def current_constituency(self):
return self.constituencies.filter(
personconstituencies__election=self.current_election)[0]
def __unicode__(self):
return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
<|fim_middle|>
<|fim▁end|> | person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def current_party(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
<|fim_middle|>
@property
def current_election(self):
return self.elections.filter(active=True)[0]
@property
def current_constituency(self):
return self.constituencies.filter(
personconstituencies__election=self.current_election)[0]
def __unicode__(self):
return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election)
<|fim▁end|> | return parties[0] |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def <|fim_middle|>(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
return parties[0]
@property
def current_election(self):
return self.elections.filter(active=True)[0]
@property
def current_constituency(self):
return self.constituencies.filter(
personconstituencies__election=self.current_election)[0]
def __unicode__(self):
return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election)
<|fim▁end|> | current_party |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def current_party(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
return parties[0]
@property
def <|fim_middle|>(self):
return self.elections.filter(active=True)[0]
@property
def current_constituency(self):
return self.constituencies.filter(
personconstituencies__election=self.current_election)[0]
def __unicode__(self):
return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election)
<|fim▁end|> | current_election |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def current_party(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
return parties[0]
@property
def current_election(self):
return self.elections.filter(active=True)[0]
@property
def <|fim_middle|>(self):
return self.constituencies.filter(
personconstituencies__election=self.current_election)[0]
def __unicode__(self):
return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election)
<|fim▁end|> | current_constituency |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(blank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituencies')
@property
def current_party(self):
parties = self.partymemberships_set.filter(membership_end=None)
if parties:
return parties[0]
@property
def current_election(self):
return self.elections.filter(active=True)[0]
@property
def current_constituency(self):
return self.constituencies.filter(
personconstituencies__election=self.current_election)[0]
def <|fim_middle|>(self):
return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
person = models.ForeignKey(Person)
party = models.ForeignKey(Party)
membership_start = models.DateField()
membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
person = models.ForeignKey(Person)
constituency = models.ForeignKey(Constituency)
election = models.ForeignKey(Election)
<|fim▁end|> | __unicode__ |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):<|fim▁hole|>
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | resp = self.app.get(self.url)
self.assertIsNotNone(resp.data) |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
<|fim_middle|>
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data) |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
<|fim_middle|>
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | self.app = timevis.app.test_client()
self.url = '/api/v2/experiment' |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
<|fim_middle|>
def test_get(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data) |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):
<|fim_middle|>
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | resp = self.app.get(self.url)
self.assertIsNotNone(resp.data) |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def test_put(self):
<|fim_middle|>
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data) |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
<|fim_middle|>
<|fim▁end|> | unittest.main() |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def <|fim_middle|>(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | setUp |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def <|fim_middle|>(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | test_post |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def <|fim_middle|>(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def test_put(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | test_get |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>import unittest
import os
import os.path
import json
# The folder holding the test data
data_path = os.path.dirname(__file__)
# Set the temporal config for testing
os.environ['TIMEVIS_CONFIG'] = os.path.join(data_path, 'config.py')
import timevis
class TestExperiment(unittest.TestCase):
def setUp(self):
self.app = timevis.app.test_client()
self.url = '/api/v2/experiment'
def test_post(self):
name = os.path.join(data_path, 'post_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.post(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
def test_get(self):
resp = self.app.get(self.url)
self.assertIsNotNone(resp.data)
def <|fim_middle|>(self):
name = os.path.join(data_path, 'put_exp.json')
with open(name) as file:
obj = json.load(file)
resp = self.app.put(self.url, data=json.dumps(obj),
content_type='application/json')
self.assertIsNotNone(resp.data)
if __name__ == '__main__':
unittest.main()
<|fim▁end|> | test_put |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# grmpy documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 18 13:05:32 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# Set variable so that todos are shown in local build
on_rtd = os.environ.get("READTHEDOCS") == "True"
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.ifconfig",
"sphinx.ext.viewcode",
"sphinxcontrib.bibtex",
"sphinx.ext.imgconverter",
]
bibtex_bibfiles = ["source/refs.bib"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "grmpy"
copyright_ = "2018, grmpy-dev team"
author = "grmpy-dev team"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = "1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing. We
# want to supress the output on readthedocs.
if on_rtd:
todo_include_todos = False
else:
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "grmpydoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
"pointsize": "12pt",
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
"figure_align": "htbp",
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "grmpy.tex", "grmpy Documentation", "Development Team", "manual")
]
<|fim▁hole|># One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "grmpy", "grmpy Documentation", [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"grmpy",
"grmpy Documentation",
author,
"grmpy",
"One line description of project.",
"Miscellaneous",
)
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright_
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"]<|fim▁end|> | # -- Options for manual page output ---------------------------------------
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# grmpy documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 18 13:05:32 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# Set variable so that todos are shown in local build
on_rtd = os.environ.get("READTHEDOCS") == "True"
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.ifconfig",
"sphinx.ext.viewcode",
"sphinxcontrib.bibtex",
"sphinx.ext.imgconverter",
]
bibtex_bibfiles = ["source/refs.bib"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "grmpy"
copyright_ = "2018, grmpy-dev team"
author = "grmpy-dev team"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = "1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing. We
# want to supress the output on readthedocs.
if on_rtd:
<|fim_middle|>
else:
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "grmpydoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
"pointsize": "12pt",
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
"figure_align": "htbp",
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "grmpy.tex", "grmpy Documentation", "Development Team", "manual")
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "grmpy", "grmpy Documentation", [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"grmpy",
"grmpy Documentation",
author,
"grmpy",
"One line description of project.",
"Miscellaneous",
)
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright_
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"]
<|fim▁end|> | todo_include_todos = False |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# grmpy documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 18 13:05:32 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# Set variable so that todos are shown in local build
on_rtd = os.environ.get("READTHEDOCS") == "True"
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.ifconfig",
"sphinx.ext.viewcode",
"sphinxcontrib.bibtex",
"sphinx.ext.imgconverter",
]
bibtex_bibfiles = ["source/refs.bib"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "grmpy"
copyright_ = "2018, grmpy-dev team"
author = "grmpy-dev team"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = "1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing. We
# want to supress the output on readthedocs.
if on_rtd:
todo_include_todos = False
else:
<|fim_middle|>
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "grmpydoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
"pointsize": "12pt",
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
"figure_align": "htbp",
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "grmpy.tex", "grmpy Documentation", "Development Team", "manual")
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "grmpy", "grmpy Documentation", [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"grmpy",
"grmpy Documentation",
author,
"grmpy",
"One line description of project.",
"Miscellaneous",
)
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright_
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"]
<|fim▁end|> | todo_include_todos = True |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def get_team_name(code):
return team_mapping[code]
def get_team_code(full_name):
for code, name in team_mapping.items():
if name == full_name:
return code
return full_name
def get_match_description(response):
match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())<|fim▁hole|> for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match)<|fim▁end|> | }
def get_match_urls(response): |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def get_team_name(code):
<|fim_middle|>
def get_team_code(full_name):
for code, name in team_mapping.items():
if name == full_name:
return code
return full_name
def get_match_description(response):
match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
}
def get_match_urls(response):
for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match)<|fim▁end|> | return team_mapping[code] |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def get_team_name(code):
return team_mapping[code]
def get_team_code(full_name):
<|fim_middle|>
def get_match_description(response):
match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
}
def get_match_urls(response):
for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match)<|fim▁end|> | for code, name in team_mapping.items():
if name == full_name:
return code
return full_name |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def get_team_name(code):
return team_mapping[code]
def get_team_code(full_name):
for code, name in team_mapping.items():
if name == full_name:
return code
return full_name
def get_match_description(response):
<|fim_middle|>
def get_match_urls(response):
for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match)<|fim▁end|> | match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
} |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def get_team_name(code):
return team_mapping[code]
def get_team_code(full_name):
for code, name in team_mapping.items():
if name == full_name:
return code
return full_name
def get_match_description(response):
match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
}
def get_match_urls(response):
<|fim_middle|>
<|fim▁end|> | for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match) |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def get_team_name(code):
return team_mapping[code]
def get_team_code(full_name):
for code, name in team_mapping.items():
if name == full_name:
<|fim_middle|>
return full_name
def get_match_description(response):
match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
}
def get_match_urls(response):
for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match)<|fim▁end|> | return code |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def <|fim_middle|>(code):
return team_mapping[code]
def get_team_code(full_name):
for code, name in team_mapping.items():
if name == full_name:
return code
return full_name
def get_match_description(response):
match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
}
def get_match_urls(response):
for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match)<|fim▁end|> | get_team_name |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def get_team_name(code):
return team_mapping[code]
def <|fim_middle|>(full_name):
for code, name in team_mapping.items():
if name == full_name:
return code
return full_name
def get_match_description(response):
match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
}
def get_match_urls(response):
for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match)<|fim▁end|> | get_team_code |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def get_team_name(code):
return team_mapping[code]
def get_team_code(full_name):
for code, name in team_mapping.items():
if name == full_name:
return code
return full_name
def <|fim_middle|>(response):
match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
}
def get_match_urls(response):
for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match)<|fim▁end|> | get_match_description |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>team_mapping = {
"SY": "Sydney",
"WB": "Western Bulldogs",
"WC": "West Coast",
"HW": "Hawthorn",
"GE": "Geelong",
"FR": "Fremantle",
"RI": "Richmond",
"CW": "Collingwood",
"CA": "Carlton",
"GW": "Greater Western Sydney",
"AD": "Adelaide",
"GC": "Gold Coast",
"ES": "Essendon",
"ME": "Melbourne",
"NM": "North Melbourne",
"PA": "Port Adelaide",
"BL": "Brisbane Lions",
"SK": "St Kilda"
}
def get_team_name(code):
return team_mapping[code]
def get_team_code(full_name):
for code, name in team_mapping.items():
if name == full_name:
return code
return full_name
def get_match_description(response):
match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
match_details = match_container.xpath(".//text()").extract()
return {
"round": match_details[1],
"venue": match_details[3],
"date": match_details[6],
"attendance": match_details[8],
"homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
"awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
"homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
"awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
}
def <|fim_middle|>(response):
for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
yield response.urljoin(match)<|fim▁end|> | get_match_urls |
<|file_name|>test_engines.py<|end_file_name|><|fim▁begin|>import pytest
import salt.engines
from tests.support.mock import MagicMock, patch
def test_engine_module_name():
engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")
assert engine.name == "foobar"
def test_engine_title_set():
engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")<|fim▁hole|> with patch("salt.utils.process.appendproctitle", MagicMock()) as mm:
with pytest.raises(KeyError):
# The method does not exist so a KeyError will be raised.
engine.run()
mm.assert_called_with("foobar")<|fim▁end|> | |
<|file_name|>test_engines.py<|end_file_name|><|fim▁begin|>import pytest
import salt.engines
from tests.support.mock import MagicMock, patch
def test_engine_module_name():
<|fim_middle|>
def test_engine_title_set():
engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")
with patch("salt.utils.process.appendproctitle", MagicMock()) as mm:
with pytest.raises(KeyError):
# The method does not exist so a KeyError will be raised.
engine.run()
mm.assert_called_with("foobar")
<|fim▁end|> | engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")
assert engine.name == "foobar" |
<|file_name|>test_engines.py<|end_file_name|><|fim▁begin|>import pytest
import salt.engines
from tests.support.mock import MagicMock, patch
def test_engine_module_name():
engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")
assert engine.name == "foobar"
def test_engine_title_set():
<|fim_middle|>
<|fim▁end|> | engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")
with patch("salt.utils.process.appendproctitle", MagicMock()) as mm:
with pytest.raises(KeyError):
# The method does not exist so a KeyError will be raised.
engine.run()
mm.assert_called_with("foobar") |
<|file_name|>test_engines.py<|end_file_name|><|fim▁begin|>import pytest
import salt.engines
from tests.support.mock import MagicMock, patch
def <|fim_middle|>():
engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")
assert engine.name == "foobar"
def test_engine_title_set():
engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")
with patch("salt.utils.process.appendproctitle", MagicMock()) as mm:
with pytest.raises(KeyError):
# The method does not exist so a KeyError will be raised.
engine.run()
mm.assert_called_with("foobar")
<|fim▁end|> | test_engine_module_name |
<|file_name|>test_engines.py<|end_file_name|><|fim▁begin|>import pytest
import salt.engines
from tests.support.mock import MagicMock, patch
def test_engine_module_name():
engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")
assert engine.name == "foobar"
def <|fim_middle|>():
engine = salt.engines.Engine({}, "foobar.start", {}, {}, {}, {}, name="foobar")
with patch("salt.utils.process.appendproctitle", MagicMock()) as mm:
with pytest.raises(KeyError):
# The method does not exist so a KeyError will be raised.
engine.run()
mm.assert_called_with("foobar")
<|fim▁end|> | test_engine_title_set |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli<|fim▁hole|> runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')<|fim▁end|> | package = pkg_resources.get_distribution('molo.core')
|
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
<|fim_middle|>
<|fim▁end|> | @patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo') |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
<|fim_middle|>
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
}) |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
<|fim_middle|>
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
}) |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
<|fim_middle|>
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
}) |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
<|fim_middle|>
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
}) |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
<|fim_middle|>
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo')) |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
<|fim_middle|>
<|fim▁end|> | from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo') |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def <|fim_middle|>(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | test_scaffold |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def <|fim_middle|>(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | test_scaffold_with_custom_dir |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def <|fim_middle|>(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | test_scaffold_with_requirements |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def <|fim_middle|>(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | test_scaffold_with_includes |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def <|fim_middle|>(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def test_get_package(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | test_unpack |
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
import pkg_resources
from mock import patch
from click import UsageError
from click.testing import CliRunner
class TestCli(TestCase):
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_custom_dir(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'bar',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_requirements(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--require', 'bar'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': ('bar',),
'include': (),
}
})
@patch('molo.core.cookiecutter.cookiecutter')
def test_scaffold_with_includes(self, mock_cookiecutter):
from molo.core.scripts import cli
package = pkg_resources.get_distribution('molo.core')
runner = CliRunner()
runner.invoke(cli.scaffold, ['foo', '--include', 'bar', 'baz'])
[call] = mock_cookiecutter.call_args_list
args, kwargs = call
self.assertTrue(kwargs['extra_context'].pop('secret_key'))
self.assertEqual(kwargs, {
'no_input': True,
'extra_context': {
'app_name': 'foo',
'directory': 'foo',
'author': 'Praekelt Foundation',
'author_email': '[email protected]',
'url': None,
'license': 'BSD',
'molo_version': package.version,
'require': (),
'include': (('bar', 'baz'),),
}
})
@patch('molo.core.scripts.cli.get_package')
@patch('molo.core.scripts.cli.get_template_dirs')
@patch('shutil.copytree')
def test_unpack(self, mock_copytree, mock_get_template_dirs,
mock_get_package):
package = pkg_resources.get_distribution('molo.core')
mock_get_package.return_value = package
mock_get_template_dirs.return_value = ['foo']
mock_copytree.return_value = True
from molo.core.scripts import cli
runner = CliRunner()
runner.invoke(cli.unpack_templates, ['app1', 'app2'])
mock_copytree.assert_called_with(
pkg_resources.resource_filename('molo.core', 'templates/foo'),
pkg_resources.resource_filename('molo.core', 'templates/foo'))
def <|fim_middle|>(self):
from molo.core.scripts.cli import get_package
self.assertRaisesRegexp(
UsageError, 'molo.foo is not installed.', get_package, 'molo.foo')
<|fim▁end|> | test_get_package |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):<|fim▁hole|>
  Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path<|fim▁end|> | """A browser that can be controlled. |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
<|fim_middle|>
<|fim▁end|> | """A browser that can be controlled.
  Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path |
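PossibleBrowser, as reproduced above, is an abstract launcher: Create() and SupportsOptions() raise NotImplementedError and are meant to be overridden for a concrete browser. A standalone sketch of that override pattern follows; the class names and return values are illustrative stand-ins, not telemetry code.
# Illustrative only: a minimal concrete implementation of an abstract
# "possible browser"-style interface, mirroring the pattern in the rows above.
class AbstractLauncher(object):
    def Create(self, finder_options):
        raise NotImplementedError()

    def SupportsOptions(self, browser_options):
        raise NotImplementedError()

class StubLauncher(AbstractLauncher):
    """Hypothetical subclass; returns a placeholder instead of a real browser."""

    def Create(self, finder_options):
        # A real implementation would start a browser process here.
        return {"launched_with": finder_options}

    def SupportsOptions(self, browser_options):
        # Accept everything; a real subclass would inspect extension support etc.
        return True

if __name__ == "__main__":
    launcher = StubLauncher()
    print(launcher.Create(finder_options={"headless": True}))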
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
<|fim_middle|>
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
<|fim_middle|>
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | return 'PossibleBrowser(app_type=%s)' % self.app_type |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
<|fim_middle|>
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | return self.app_type |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
<|fim_middle|>
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | return self._supports_tab_control |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
<|fim_middle|>
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | raise NotImplementedError() |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
<|fim_middle|>
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | raise NotImplementedError() |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
<|fim_middle|>
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | """Tests for extension support."""
raise NotImplementedError() |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
<|fim_middle|>
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | return False |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
<|fim_middle|>
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | pass |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
<|fim_middle|>
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | pass |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
<|fim_middle|>
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | return -1 |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
<|fim_middle|>
<|fim▁end|> | self._credentials_path = credentials_path |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def <|fim_middle|>(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | __init__ |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def <|fim_middle|>(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | __repr__ |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def <|fim_middle|>(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | browser_type |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def <|fim_middle|>(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | supports_tab_control |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def <|fim_middle|>(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | _InitPlatformIfNeeded |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def <|fim_middle|>(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | Create |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def <|fim_middle|>(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | SupportsOptions |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def <|fim_middle|>(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | IsRemote |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def <|fim_middle|>(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | RunRemote |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def <|fim_middle|>(self):
pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | UpdateExecutableIfNeeded |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def <|fim_middle|>(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | last_modification_time |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it.
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):
pass
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
return -1
def <|fim_middle|>(self, credentials_path):
self._credentials_path = credentials_path
<|fim▁end|> | SetCredentialsPath |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},<|fim▁hole|> 'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")<|fim▁end|> | 'METADATA': {}, |
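The class_diagrams.py rows build their diagram through pydot's Dot, Node and Edge objects. Below is a smaller, self-contained sketch of the same add_node/add_edge/write pattern; the node names and output file name are placeholders, pydot must be installed, and write_png (not used here) would additionally need the Graphviz binaries on PATH, as the module docstring notes.
# Minimal pydot sketch mirroring the pattern above: build a two-node digraph
# and write it out as DOT source text.
import pydot

graph = pydot.Dot(graph_type="digraph", rankdir="TB")
parent = pydot.Node("MAP", style="filled", fillcolor="#33a333")
child = pydot.Node("LAYER", style="filled", fillcolor="#6b6bd1")
graph.add_node(parent)
graph.add_node(child)
graph.add_edge(pydot.Edge(parent, child))

print(graph.to_string())          # DOT source as a string
graph.write("demo_classes.dot")   # same call style as save_file() in the rows above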
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
<|fim_middle|>
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | os.environ['PATH'] = gviz_path + ";" + os.environ['PATH'] |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
<|fim_middle|>
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | """
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node)) |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
<|fim_middle|>
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1) |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
<|fim_middle|>
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename) |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
<|fim_middle|>
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn) |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
<|fim_middle|>
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes" |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
<|fim_middle|>
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | fn = "map_classes"
root, = classes.keys() |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
<|fim_middle|>
<|fim▁end|> | gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!") |
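The docstring in these rows gives the manual command (dot -Tpng ... -o outfile.png) for turning the emitted .dot file into a PNG. The same step can be scripted as sketched below; the wrapper function and file names are assumptions for illustration, and the Graphviz dot binary must be on PATH (for example via graphviz_setup() above on Windows).
# Hedged sketch: render a generated .dot file with the Graphviz "dot" CLI,
# equivalent to the "dot -Tpng ... -o outfile.png" command in the docstring.
import subprocess

def render_dot(dot_path="map_classes.dot", png_path="map_classes.png"):
    # check=True raises CalledProcessError if dot exits with a non-zero status.
    subprocess.run(["dot", "-Tpng", dot_path, "-o", png_path], check=True)

if __name__ == "__main__":
    render_dot()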
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def <|fim_middle|>(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | graphviz_setup |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def <|fim_middle|>(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | add_child |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def <|fim_middle|>(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | add_children |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def <|fim_middle|>(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def main(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | save_file |
<|file_name|>class_diagrams.py<|end_file_name|><|fim▁begin|>r"""
Create MapServer class diagrams
Requires https://graphviz.gitlab.io/_pages/Download/Download_windows.html
https://stackoverflow.com/questions/1494492/graphviz-how-to-go-from-dot-to-a-graph
For DOT language see http://www.graphviz.org/doc/info/attrs.html
cd C:\Program Files (x86)\Graphviz2.38\bin
dot -Tpng D:\GitHub\mappyfile\mapfile_classes.dot -o outfile.png
outfile.png
For Entity Relationship diagrams:
https://graphviz.readthedocs.io/en/stable/examples.html#er-py
"""
import os
import pydot
# import pprint
FONT = "Lucida Sans"
def graphviz_setup(gviz_path):
os.environ['PATH'] = gviz_path + ";" + os.environ['PATH']
def add_child(graph, child_id, child_label, parent_id, colour):
"""
http://www.graphviz.org/doc/info/shapes.html#polygon
"""
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT)
graph.add_node(node)
graph.add_edge(pydot.Edge(parent_id, node))
def add_children(graph, parent_id, d, level=0):
blue = "#6b6bd1"
white = "#fdfefd"
green = "#33a333"
colours = [blue, white, green] * 3
for class_, children in d.items():
colour = colours[level]
child_label = class_
child_id = parent_id + "_" + class_
add_child(graph, child_id, child_label, parent_id, colour)
add_children(graph, child_id, children, level+1)
def save_file(graph, fn):
filename = "%s.png" % fn
graph.write_png(filename)
graph.write("%s.dot" % fn)
os.startfile(filename)
def <|fim_middle|>(gviz_path, layer_only=False):
graphviz_setup(gviz_path)
graph = pydot.Dot(graph_type='digraph', rankdir="TB")
layer_children = {
'CLASS': {
'LABEL': {'STYLE': {}},
'CONNECTIONOPTIONS': {},
'LEADER': {'STYLE': {}},
'STYLE': {},
'VALIDATION': {}
},
'CLUSTER': {},
'COMPOSITE': {},
'FEATURE': {'POINTS': {}},
'GRID': {},
'JOIN': {},
'METADATA': {},
'PROJECTION': {},
'SCALETOKEN': {'VALUES': {}},
'VALIDATION': {}
}
# pprint.pprint(layer_children)
classes = {
"MAP": {
"LAYER": layer_children,
'LEGEND': {'LABEL': {}},
'PROJECTION': {},
'QUERYMAP': {},
'REFERENCE': {},
'SCALEBAR': {'LABEL': {}},
'SYMBOL': {},
'WEB': {'METADATA': {}, 'VALIDATION': {}}
}
}
if layer_only:
root = "LAYER"
classes = classes["MAP"]
fn = "layer_classes"
else:
fn = "map_classes"
root, = classes.keys()
node = pydot.Node(root, style="filled", fillcolor="#33a333", label=root, fontname=FONT, shape="polygon")
graph.add_node(node)
add_children(graph, root, classes[root])
save_file(graph, fn)
if __name__ == "__main__":
gviz_path = r"C:\Program Files (x86)\Graphviz2.38\bin"
main(gviz_path, True)
main(gviz_path, False)
print("Done!")
<|fim▁end|> | main |
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something useful")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
def __init__(self, example=None, description=None):
self._description = description
self._example = example
def structure(self, accessor):
if accessor == "example":
return self._example
return self._description
class TypeCheck(Validator):
want_type = None
want = "none"
def __init__(self, example, desc=None):
if desc is None:
desc = self.want
Validator.__init__(self, example, desc)
def validate(self, pre, structure):
if isinstance(structure, self.want_type):
return True
raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
return False
class Int(TypeCheck):
want = "int"
want_type = int
class Bool(TypeCheck):
want = "bool"
want_type = bool
class String(TypeCheck):
want = "string"
want_type = str
class Ip(TypeCheck):
want = "ip"
want_type = str
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
try:
socket.inet_pton(socket.AF_INET, structure)
return True
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, structure)
return True
except socket.error:
pass
raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
want = "memory"
want_type = str
validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
if self.validator.match(structure):
return True
else:
raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
want = "list"
want_type = list
def __init__(self, schema, desc=None):
TypeCheck.__init__(self, desc)
self.schema = schema
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate_each(item):
return self.schema.validate(pre, item)
return sum(map(_validate_each, structure)) > 1
<|fim▁hole|>class Or(Validator):
def __init__(self, schemas, desc=None, exclusive=True):
Validator.__init__(self, desc)
self.schemas = schemas
self.exclusive = exclusive
def validate(self, pre, structure):
errors = []
def _validate_each(schema):
try:
return schema.validate(pre, structure)
except error.ValidatorError as err:
errors.append(err)
return False
state = sum(map(_validate_each, self.schemas))
if self.exclusive and (state > 1 or state == 0):
def _error_lines():
it = iter(errors)
yield " ".join(next(it).error())
for err in it:
yield "or"
yield " ".join(err.error())
raise error.ValidatorError(["{} is ambiguous:".format(pre)] +
list(_error_lines()))
return True
def structure(self, accessor):
desc = []
descs = [ s.structure(accessor) for s in self.schemas ]
for d in descs[:-1]:
desc.append(d)
desc.append("__or__")
desc.append(descs[-1])
return desc
# Key validators --------------------------------------------------------------
class KeyValidator(Validator):
def structure(self, accessor, overwrite=None):
name = self.name
if overwrite:
name = overwrite
return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
def __init__(self, schema, example, desc=None):
KeyValidator.__init__(self, desc, example)
self.name = "*"
self.example = example
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
def _validate_each(pair):
(name, next_structure) = pair
return self.schema.validate(pre + " > " + name, next_structure)
return sum(map(_validate_each, structure.items())) >= 1
def structure(self, accessor):
if accessor == "example":
return KeyValidator.structure(self, accessor, self.example)
return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
KeyValidator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
return False
return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
Validator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
raise error.ValidatorError("{} must have {} "
"defined".format(pre, self.name))
return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
want = "dictionary"
want_type = dict
def __init__(self, schemas, desc=None):
TypeCheck.__init__(self, desc)
self.schemas = schemas
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate(schema):
return schema.validate(pre, structure)
return sum(map(_validate, self.schemas)) >= 1
def structure(self, accessor):
desc_dict = {}
for key, value in [s.structure(accessor) for s in self.schemas]:
desc_dict[key] = value
return desc_dict<|fim▁end|> | def structure(self, accessor):
return [self.schema.structure(accessor)]
|
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something useful")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
<|fim_middle|>
class TypeCheck(Validator):
want_type = None
want = "none"
def __init__(self, example, desc=None):
if desc is None:
desc = self.want
Validator.__init__(self, example, desc)
def validate(self, pre, structure):
if isinstance(structure, self.want_type):
return True
raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
return False
class Int(TypeCheck):
want = "int"
want_type = int
class Bool(TypeCheck):
want = "bool"
want_type = bool
class String(TypeCheck):
want = "string"
want_type = str
class Ip(TypeCheck):
want = "ip"
want_type = str
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
try:
socket.inet_pton(socket.AF_INET, structure)
return True
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, structure)
return True
except socket.error:
pass
raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
want = "memory"
want_type = str
validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
if self.validator.match(structure):
return True
else:
raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
want = "list"
want_type = list
def __init__(self, schema, desc=None):
TypeCheck.__init__(self, desc)
self.schema = schema
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate_each(item):
return self.schema.validate(pre, item)
return sum(map(_validate_each, structure)) > 1
def structure(self, accessor):
return [self.schema.structure(accessor)]
class Or(Validator):
def __init__(self, schemas, desc=None, exclusive=True):
Validator.__init__(self, desc)
self.schemas = schemas
self.exclusive = exclusive
def validate(self, pre, structure):
errors = []
def _validate_each(schema):
try:
return schema.validate(pre, structure)
except error.ValidatorError as err:
errors.append(err)
return False
state = sum(map(_validate_each, self.schemas))
if self.exclusive and (state > 1 or state == 0):
def _error_lines():
it = iter(errors)
yield " ".join(next(it).error())
for err in it:
yield "or"
yield " ".join(err.error())
raise error.ValidatorError(["{} is ambiguous:".format(pre)] +
list(_error_lines()))
return True
def structure(self, accessor):
desc = []
descs = [ s.structure(accessor) for s in self.schemas ]
for d in descs[:-1]:
desc.append(d)
desc.append("__or__")
desc.append(descs[-1])
return desc
# Key validators --------------------------------------------------------------
class KeyValidator(Validator):
def structure(self, accessor, overwrite=None):
name = self.name
if overwrite:
name = overwrite
return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
def __init__(self, schema, example, desc=None):
KeyValidator.__init__(self, desc, example)
self.name = "*"
self.example = example
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
def _validate_each(pair):
(name, next_structure) = pair
return self.schema.validate(pre + " > " + name, next_structure)
return sum(map(_validate_each, structure.items())) >= 1
def structure(self, accessor):
if accessor == "example":
return KeyValidator.structure(self, accessor, self.example)
return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
KeyValidator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
return False
return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
Validator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
raise error.ValidatorError("{} must have {} "
"defined".format(pre, self.name))
return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
want = "dictionary"
want_type = dict
def __init__(self, schemas, desc=None):
TypeCheck.__init__(self, desc)
self.schemas = schemas
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate(schema):
return schema.validate(pre, structure)
return sum(map(_validate, self.schemas)) >= 1
def structure(self, accessor):
desc_dict = {}
for key, value in [s.structure(accessor) for s in self.schemas]:
desc_dict[key] = value
return desc_dict
<|fim▁end|> | def __init__(self, example=None, description=None):
self._description = description
self._example = example
def structure(self, accessor):
if accessor == "example":
return self._example
return self._description |
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something useful")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
def __init__(self, example=None, description=None):
<|fim_middle|>
def structure(self, accessor):
if accessor == "example":
return self._example
return self._description
class TypeCheck(Validator):
want_type = None
want = "none"
def __init__(self, example, desc=None):
if desc is None:
desc = self.want
Validator.__init__(self, example, desc)
def validate(self, pre, structure):
if isinstance(structure, self.want_type):
return True
raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
return False
class Int(TypeCheck):
want = "int"
want_type = int
class Bool(TypeCheck):
want = "bool"
want_type = bool
class String(TypeCheck):
want = "string"
want_type = str
class Ip(TypeCheck):
want = "ip"
want_type = str
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
try:
socket.inet_pton(socket.AF_INET, structure)
return True
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, structure)
return True
except socket.error:
pass
raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
want = "memory"
want_type = str
validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
if self.validator.match(structure):
return True
else:
raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
want = "list"
want_type = list
def __init__(self, schema, desc=None):
TypeCheck.__init__(self, desc)
self.schema = schema
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate_each(item):
return self.schema.validate(pre, item)
return sum(map(_validate_each, structure)) > 1
def structure(self, accessor):
return [self.schema.structure(accessor)]
class Or(Validator):
def __init__(self, schemas, desc=None, exclusive=True):
Validator.__init__(self, desc)
self.schemas = schemas
self.exclusive = exclusive
def validate(self, pre, structure):
errors = []
def _validate_each(schema):
try:
return schema.validate(pre, structure)
except error.ValidatorError as err:
errors.append(err)
return False
state = sum(map(_validate_each, self.schemas))
if self.exclusive and (state > 1 or state == 0):
def _error_lines():
it = iter(errors)
yield " ".join(next(it).error())
for err in it:
yield "or"
yield " ".join(err.error())
raise error.ValidatorError(["{} is ambiguous:".format(pre)] +
list(_error_lines()))
return True
def structure(self, accessor):
desc = []
descs = [ s.structure(accessor) for s in self.schemas ]
for d in descs[:-1]:
desc.append(d)
desc.append("__or__")
desc.append(descs[-1])
return desc
# Key validators --------------------------------------------------------------
class KeyValidator(Validator):
def structure(self, accessor, overwrite=None):
name = self.name
if overwrite:
name = overwrite
return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
def __init__(self, schema, example, desc=None):
KeyValidator.__init__(self, desc, example)
self.name = "*"
self.example = example
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
def _validate_each(pair):
(name, next_structure) = pair
return self.schema.validate(pre + " > " + name, next_structure)
return sum(map(_validate_each, structure.items())) >= 1
def structure(self, accessor):
if accessor == "example":
return KeyValidator.structure(self, accessor, self.example)
return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
KeyValidator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
return False
return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
Validator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
raise error.ValidatorError("{} must have {} "
"defined".format(pre, self.name))
return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
want = "dictionary"
want_type = dict
def __init__(self, schemas, desc=None):
TypeCheck.__init__(self, desc)
self.schemas = schemas
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate(schema):
return schema.validate(pre, structure)
return sum(map(_validate, self.schemas)) >= 1
def structure(self, accessor):
desc_dict = {}
for key, value in [s.structure(accessor) for s in self.schemas]:
desc_dict[key] = value
return desc_dict
<|fim▁end|> | self._description = description
self._example = example |
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something useful")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
def __init__(self, example=None, description=None):
self._description = description
self._example = example
def structure(self, accessor):
<|fim_middle|>
class TypeCheck(Validator):
want_type = None
want = "none"
def __init__(self, example, desc=None):
if desc is None:
desc = self.want
Validator.__init__(self, example, desc)
def validate(self, pre, structure):
if isinstance(structure, self.want_type):
return True
raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
return False
class Int(TypeCheck):
want = "int"
want_type = int
class Bool(TypeCheck):
want = "bool"
want_type = bool
class String(TypeCheck):
want = "string"
want_type = str
class Ip(TypeCheck):
want = "ip"
want_type = str
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
try:
socket.inet_pton(socket.AF_INET, structure)
return True
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, structure)
return True
except socket.error:
pass
raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
want = "memory"
want_type = str
validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
if self.validator.match(structure):
return True
else:
raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
want = "list"
want_type = list
def __init__(self, schema, desc=None):
TypeCheck.__init__(self, desc)
self.schema = schema
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate_each(item):
return self.schema.validate(pre, item)
return sum(map(_validate_each, structure)) > 1
def structure(self, accessor):
return [self.schema.structure(accessor)]
class Or(Validator):
def __init__(self, schemas, desc=None, exclusive=True):
Validator.__init__(self, desc)
self.schemas = schemas
self.exclusive = exclusive
def validate(self, pre, structure):
errors = []
def _validate_each(schema):
try:
return schema.validate(pre, structure)
except error.ValidatorError as err:
errors.append(err)
return False
state = sum(map(_validate_each, self.schemas))
if self.exclusive and (state > 1 or state == 0):
def _error_lines():
it = iter(errors)
yield " ".join(next(it).error())
for err in it:
yield "or"
yield " ".join(err.error())
raise error.ValidatorError(["{} is ambiguous:".format(pre)] +
list(_error_lines()))
return True
def structure(self, accessor):
desc = []
descs = [ s.structure(accessor) for s in self.schemas ]
for d in descs[:-1]:
desc.append(d)
desc.append("__or__")
desc.append(descs[-1])
return desc
# Key validators --------------------------------------------------------------
class KeyValidator(Validator):
def structure(self, accessor, overwrite=None):
name = self.name
if overwrite:
name = overwrite
return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
def __init__(self, schema, example, desc=None):
KeyValidator.__init__(self, desc, example)
self.name = "*"
self.example = example
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
def _validate_each(pair):
(name, next_structure) = pair
return self.schema.validate(pre + " > " + name, next_structure)
return sum(map(_validate_each, structure.items())) >= 1
def structure(self, accessor):
if accessor == "example":
return KeyValidator.structure(self, accessor, self.example)
return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
KeyValidator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
return False
return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
Validator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
raise error.ValidatorError("{} must have {} "
"defined".format(pre, self.name))
return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
want = "dictionary"
want_type = dict
def __init__(self, schemas, desc=None):
TypeCheck.__init__(self, desc)
self.schemas = schemas
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate(schema):
return schema.validate(pre, structure)
return sum(map(_validate, self.schemas)) >= 1
def structure(self, accessor):
desc_dict = {}
for key, value in [s.structure(accessor) for s in self.schemas]:
desc_dict[key] = value
return desc_dict
<|fim▁end|> | if accessor == "example":
return self._example
return self._description |
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something useful")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
def __init__(self, example=None, description=None):
self._description = description
self._example = example
def structure(self, accessor):
if accessor == "example":
return self._example
return self._description
class TypeCheck(Validator):
<|fim_middle|>
class Int(TypeCheck):
want = "int"
want_type = int
class Bool(TypeCheck):
want = "bool"
want_type = bool
class String(TypeCheck):
want = "string"
want_type = str
class Ip(TypeCheck):
want = "ip"
want_type = str
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
try:
socket.inet_pton(socket.AF_INET, structure)
return True
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, structure)
return True
except socket.error:
pass
raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
want = "memory"
want_type = str
validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
if self.validator.match(structure):
return True
else:
raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
want = "list"
want_type = list
def __init__(self, schema, desc=None):
TypeCheck.__init__(self, desc)
self.schema = schema
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate_each(item):
return self.schema.validate(pre, item)
return sum(map(_validate_each, structure)) > 1
def structure(self, accessor):
return [self.schema.structure(accessor)]
class Or(Validator):
def __init__(self, schemas, desc=None, exclusive=True):
Validator.__init__(self, desc)
self.schemas = schemas
self.exclusive = exclusive
def validate(self, pre, structure):
errors = []
def _validate_each(schema):
try:
return schema.validate(pre, structure)
except error.ValidatorError as err:
errors.append(err)
return False
state = sum(map(_validate_each, self.schemas))
if self.exclusive and (state > 1 or state == 0):
def _error_lines():
it = iter(errors)
yield " ".join(next(it).error())
for err in it:
yield "or"
yield " ".join(err.error())
raise error.ValidatorError(["{} is ambiguous:".format(pre)] +
list(_error_lines()))
return True
def structure(self, accessor):
desc = []
descs = [ s.structure(accessor) for s in self.schemas ]
for d in descs[:-1]:
desc.append(d)
desc.append("__or__")
desc.append(descs[-1])
return desc
# Key validators --------------------------------------------------------------
class KeyValidator(Validator):
def structure(self, accessor, overwrite=None):
name = self.name
if overwrite:
name = overwrite
return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
def __init__(self, schema, example, desc=None):
KeyValidator.__init__(self, desc, example)
self.name = "*"
self.example = example
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
def _validate_each(pair):
(name, next_structure) = pair
return self.schema.validate(pre + " > " + name, next_structure)
return sum(map(_validate_each, structure.items())) >= 1
def structure(self, accessor):
if accessor == "example":
return KeyValidator.structure(self, accessor, self.example)
return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
KeyValidator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
return False
return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
Validator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
raise error.ValidatorError("{} must have {} "
"defined".format(pre, self.name))
return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
want = "dictionary"
want_type = dict
def __init__(self, schemas, desc=None):
TypeCheck.__init__(self, desc)
self.schemas = schemas
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate(schema):
return schema.validate(pre, structure)
return sum(map(_validate, self.schemas)) >= 1
def structure(self, accessor):
desc_dict = {}
for key, value in [s.structure(accessor) for s in self.schemas]:
desc_dict[key] = value
return desc_dict
<|fim▁end|> | want_type = None
want = "none"
def __init__(self, example, desc=None):
if desc is None:
desc = self.want
Validator.__init__(self, example, desc)
def validate(self, pre, structure):
if isinstance(structure, self.want_type):
return True
raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
return False |
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something useful")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
def __init__(self, example=None, description=None):
self._description = description
self._example = example
def structure(self, accessor):
if accessor == "example":
return self._example
return self._description
class TypeCheck(Validator):
want_type = None
want = "none"
def __init__(self, example, desc=None):
<|fim_middle|>
def validate(self, pre, structure):
if isinstance(structure, self.want_type):
return True
raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
return False
class Int(TypeCheck):
want = "int"
want_type = int
class Bool(TypeCheck):
want = "bool"
want_type = bool
class String(TypeCheck):
want = "string"
want_type = str
class Ip(TypeCheck):
want = "ip"
want_type = str
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
try:
socket.inet_pton(socket.AF_INET, structure)
return True
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, structure)
return True
except socket.error:
pass
raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
want = "memory"
want_type = str
validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
if self.validator.match(structure):
return True
else:
raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
want = "list"
want_type = list
def __init__(self, schema, desc=None):
TypeCheck.__init__(self, desc)
self.schema = schema
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate_each(item):
return self.schema.validate(pre, item)
return sum(map(_validate_each, structure)) > 1
def structure(self, accessor):
return [self.schema.structure(accessor)]
class Or(Validator):
def __init__(self, schemas, desc=None, exclusive=True):
Validator.__init__(self, desc)
self.schemas = schemas
self.exclusive = exclusive
def validate(self, pre, structure):
errors = []
def _validate_each(schema):
try:
return schema.validate(pre, structure)
except error.ValidatorError as err:
errors.append(err)
return False
state = sum(map(_validate_each, self.schemas))
if self.exclusive and (state > 1 or state == 0):
def _error_lines():
it = iter(errors)
yield " ".join(next(it).error())
for err in it:
yield "or"
yield " ".join(err.error())
raise error.ValidatorError(["{} is ambiguous:".format(pre)] +
list(_error_lines()))
return True
def structure(self, accessor):
desc = []
descs = [ s.structure(accessor) for s in self.schemas ]
for d in descs[:-1]:
desc.append(d)
desc.append("__or__")
desc.append(descs[-1])
return desc
# Key validators --------------------------------------------------------------
class KeyValidator(Validator):
def structure(self, accessor, overwrite=None):
name = self.name
if overwrite:
name = overwrite
return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
def __init__(self, schema, example, desc=None):
KeyValidator.__init__(self, desc, example)
self.name = "*"
self.example = example
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
def _validate_each(pair):
(name, next_structure) = pair
return self.schema.validate(pre + " > " + name, next_structure)
return sum(map(_validate_each, structure.items())) >= 1
def structure(self, accessor):
if accessor == "example":
return KeyValidator.structure(self, accessor, self.example)
return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
KeyValidator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
return False
return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
Validator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
raise error.ValidatorError("{} must have {} "
"defined".format(pre, self.name))
return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
want = "dictionary"
want_type = dict
def __init__(self, schemas, desc=None):
TypeCheck.__init__(self, desc)
self.schemas = schemas
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate(schema):
return schema.validate(pre, structure)
return sum(map(_validate, self.schemas)) >= 1
def structure(self, accessor):
desc_dict = {}
for key, value in [s.structure(accessor) for s in self.schemas]:
desc_dict[key] = value
return desc_dict
<|fim▁end|> | if desc is None:
desc = self.want
Validator.__init__(self, example, desc) |
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something useful")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
def __init__(self, example=None, description=None):
self._description = description
self._example = example
def structure(self, accessor):
if accessor == "example":
return self._example
return self._description
class TypeCheck(Validator):
want_type = None
want = "none"
def __init__(self, example, desc=None):
if desc is None:
desc = self.want
Validator.__init__(self, example, desc)
def validate(self, pre, structure):
<|fim_middle|>
class Int(TypeCheck):
want = "int"
want_type = int
class Bool(TypeCheck):
want = "bool"
want_type = bool
class String(TypeCheck):
want = "string"
want_type = str
class Ip(TypeCheck):
want = "ip"
want_type = str
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
try:
socket.inet_pton(socket.AF_INET, structure)
return True
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, structure)
return True
except socket.error:
pass
raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
want = "memory"
want_type = str
validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
if self.validator.match(structure):
return True
else:
raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
want = "list"
want_type = list
def __init__(self, schema, desc=None):
TypeCheck.__init__(self, desc)
self.schema = schema
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate_each(item):
return self.schema.validate(pre, item)
return sum(map(_validate_each, structure)) > 1
def structure(self, accessor):
return [self.schema.structure(accessor)]
class Or(Validator):
def __init__(self, schemas, desc=None, exclusive=True):
Validator.__init__(self, desc)
self.schemas = schemas
self.exclusive = exclusive
def validate(self, pre, structure):
errors = []
def _validate_each(schema):
try:
return schema.validate(pre, structure)
except error.ValidatorError as err:
errors.append(err)
return False
state = sum(map(_validate_each, self.schemas))
if self.exclusive and (state > 1 or state == 0):
def _error_lines():
it = iter(errors)
yield " ".join(next(it).error())
for err in it:
yield "or"
yield " ".join(err.error())
raise error.ValidatorError(["{} is ambiguous:".format(pre)] +
list(_error_lines()))
return True
def structure(self, accessor):
desc = []
descs = [ s.structure(accessor) for s in self.schemas ]
for d in descs[:-1]:
desc.append(d)
desc.append("__or__")
desc.append(descs[-1])
return desc
# Key validators --------------------------------------------------------------
class KeyValidator(Validator):
def structure(self, accessor, overwrite=None):
name = self.name
if overwrite:
name = overwrite
return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
def __init__(self, schema, example, desc=None):
KeyValidator.__init__(self, desc, example)
self.name = "*"
self.example = example
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
def _validate_each(pair):
(name, next_structure) = pair
return self.schema.validate(pre + " > " + name, next_structure)
return sum(map(_validate_each, structure.items())) >= 1
def structure(self, accessor):
if accessor == "example":
return KeyValidator.structure(self, accessor, self.example)
return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
KeyValidator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
return False
return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
Validator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
raise error.ValidatorError("{} must have {} "
"defined".format(pre, self.name))
return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
want = "dictionary"
want_type = dict
def __init__(self, schemas, desc=None):
TypeCheck.__init__(self, desc)
self.schemas = schemas
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate(schema):
return schema.validate(pre, structure)
return sum(map(_validate, self.schemas)) >= 1
def structure(self, accessor):
desc_dict = {}
for key, value in [s.structure(accessor) for s in self.schemas]:
desc_dict[key] = value
return desc_dict
<|fim▁end|> | if isinstance(structure, self.want_type):
return True
raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
return False |
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something useful")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
def __init__(self, example=None, description=None):
self._description = description
self._example = example
def structure(self, accessor):
if accessor == "example":
return self._example
return self._description
class TypeCheck(Validator):
want_type = None
want = "none"
def __init__(self, example, desc=None):
if desc is None:
desc = self.want
Validator.__init__(self, example, desc)
def validate(self, pre, structure):
if isinstance(structure, self.want_type):
return True
raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
return False
class Int(TypeCheck):
<|fim_middle|>
class Bool(TypeCheck):
want = "bool"
want_type = bool
class String(TypeCheck):
want = "string"
want_type = str
class Ip(TypeCheck):
want = "ip"
want_type = str
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
try:
socket.inet_pton(socket.AF_INET, structure)
return True
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, structure)
return True
except socket.error:
pass
raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
want = "memory"
want_type = str
validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
if self.validator.match(structure):
return True
else:
raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
want = "list"
want_type = list
def __init__(self, schema, desc=None):
TypeCheck.__init__(self, desc)
self.schema = schema
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate_each(item):
return self.schema.validate(pre, item)
return sum(map(_validate_each, structure)) > 1
def structure(self, accessor):
return [self.schema.structure(accessor)]
class Or(Validator):
def __init__(self, schemas, desc=None, exclusive=True):
Validator.__init__(self, desc)
self.schemas = schemas
self.exclusive = exclusive
def validate(self, pre, structure):
errors = []
def _validate_each(schema):
try:
return schema.validate(pre, structure)
except error.ValidatorError as err:
errors.append(err)
return False
state = sum(map(_validate_each, self.schemas))
if self.exclusive and (state > 1 or state == 0):
def _error_lines():
it = iter(errors)
yield " ".join(next(it).error())
for err in it:
yield "or"
yield " ".join(err.error())
raise error.ValidatorError(["{} is ambiguous:".format(pre)] +
list(_error_lines()))
return True
def structure(self, accessor):
desc = []
descs = [ s.structure(accessor) for s in self.schemas ]
for d in descs[:-1]:
desc.append(d)
desc.append("__or__")
desc.append(descs[-1])
return desc
# Key validators --------------------------------------------------------------
class KeyValidator(Validator):
def structure(self, accessor, overwrite=None):
name = self.name
if overwrite:
name = overwrite
return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
def __init__(self, schema, example, desc=None):
KeyValidator.__init__(self, desc, example)
self.name = "*"
self.example = example
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
def _validate_each(pair):
(name, next_structure) = pair
return self.schema.validate(pre + " > " + name, next_structure)
return sum(map(_validate_each, structure.items())) >= 1
def structure(self, accessor):
if accessor == "example":
return KeyValidator.structure(self, accessor, self.example)
return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
KeyValidator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
return False
return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
Validator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
raise error.ValidatorError("{} must have {} "
"defined".format(pre, self.name))
return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
want = "dictionary"
want_type = dict
def __init__(self, schemas, desc=None):
TypeCheck.__init__(self, desc)
self.schemas = schemas
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate(schema):
return schema.validate(pre, structure)
return sum(map(_validate, self.schemas)) >= 1
def structure(self, accessor):
desc_dict = {}
for key, value in [s.structure(accessor) for s in self.schemas]:
desc_dict[key] = value
return desc_dict
<|fim▁end|> | want = "int"
want_type = int |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.