prompt (large_string, lengths 72 to 9.34k) | completion (large_string, lengths 0 to 7.61k)
---|---|
<|file_name|>format_CIAAW.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
import re
import mpmath as mp
mp.dps=250
mp.mp.dps = 250
if len(sys.argv) != 2:
print("Usage: format_CIAAW.py ciaawfile")
quit(1)
path = sys.argv[1]
atomre = re.compile(r'^(\d+) +(\w\w*) +(\w+) +\[?(\d+)\]?\*? +(.*) *$')
isore = re.compile(r'^(\d+)\*? +(\[?\d.*.*\]?) *$')
brange = re.compile(r'^\[([\d\.]+),([\d\.]+)\].*$')
buncertain = re.compile(r'^([\d\.]+)\((\d+)\)[a-z]*$')
bnum = re.compile(r'^([\d\d]+)$')
atommassline = re.compile(r'^(\d+) +(\w\w*) +(\w+) +(.*) *$')
def NumberStr(n):
# Replace spaces
s = n.replace(' ', '')
# remove "exactly" for the carbon mass
s = s.replace('(exactly)', '')
# if only a number, put it three times
m = bnum.match(s)
if m:
s = "{:<25} {:<25} {:<25}".format(m.group(1), m.group(1), m.group(1))
# if parentheses uncertainty...
m = buncertain.match(s)
if m:
# tricky. duplicate the first part as a string
<|fim_middle|>
# Replace bracketed ranges with parentheses
m = brange.match(s)
if m:
slow = mp.mpf(m.group(1))
shigh = mp.mpf(m.group(2))
smid = (shigh + slow)/mp.mpf("2.0")
s = "{:<25} {:<25} {:<25}".format(mp.nstr(smid, 18), mp.nstr(slow, 18), mp.nstr(shigh, 18))
# just a dash?
if s == "-":
s = "{:<25} {:<25} {:<25}".format(0, 0, 0)
return s
# First 5 lines are comments
filelines = [ x.strip() for x in open(path).readlines() ]
curatom = None
for line in filelines:
matomre = atomre.match(line)
misore = isore.match(line)
matommass = atommassline.match(line)
if matomre:
curatom = "{:<5} {:<5}".format(matomre.group(1), matomre.group(2))
print("{} {:<6} {:<25}".format(curatom, matomre.group(4), NumberStr(matomre.group(5))))
elif misore:
print("{} {:<6} {:<25}".format(curatom, misore.group(1), NumberStr(misore.group(2))))
elif matommass:
curatom = "{:<5} {:<5}".format(matommass.group(1), matommass.group(2))
print("{} {:<25}".format(curatom, NumberStr(matommass.group(4))))
else:
print(line) # comment lines, etc
<|fim▁end|> | s2 = m.group(1)
# but replace with all zero
s2 = re.sub(r'\d', '0', s2)
# now replace last characters
l = len(m.group(2))
s2 = s2[:len(s2)-l] + m.group(2)
# convert to a float
serr = mp.mpf(s2)
scenter = mp.mpf(m.group(1))
s = "{:<25} {:<25} {:<25}".format(mp.nstr(scenter, 18), mp.nstr(scenter-serr, 18), mp.nstr(scenter+serr, 18)) |
<|file_name|>format_CIAAW.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
import re
import mpmath as mp
mp.dps=250
mp.mp.dps = 250
if len(sys.argv) != 2:
print("Usage: format_CIAAW.py ciaawfile")
quit(1)
path = sys.argv[1]
atomre = re.compile(r'^(\d+) +(\w\w*) +(\w+) +\[?(\d+)\]?\*? +(.*) *$')
isore = re.compile(r'^(\d+)\*? +(\[?\d.*.*\]?) *$')
brange = re.compile(r'^\[([\d\.]+),([\d\.]+)\].*$')
buncertain = re.compile(r'^([\d\.]+)\((\d+)\)[a-z]*$')
bnum = re.compile(r'^([\d\d]+)$')
atommassline = re.compile(r'^(\d+) +(\w\w*) +(\w+) +(.*) *$')
def NumberStr(n):
# Replace spaces
s = n.replace(' ', '')
# remove "exactly" for the carbon mass
s = s.replace('(exactly)', '')
# if only a number, put it three times
m = bnum.match(s)
if m:
s = "{:<25} {:<25} {:<25}".format(m.group(1), m.group(1), m.group(1))
# if parentheses uncertainty...
m = buncertain.match(s)
if m:
# tricky. duplicate the first part as a string
s2 = m.group(1)
# but replace with all zero
s2 = re.sub(r'\d', '0', s2)
# now replace last characters
l = len(m.group(2))
s2 = s2[:len(s2)-l] + m.group(2)
# convert to a float
serr = mp.mpf(s2)
scenter = mp.mpf(m.group(1))
s = "{:<25} {:<25} {:<25}".format(mp.nstr(scenter, 18), mp.nstr(scenter-serr, 18), mp.nstr(scenter+serr, 18))
# Replace bracketed ranges with parentheses
m = brange.match(s)
if m:
<|fim_middle|>
# just a dash?
if s == "-":
s = "{:<25} {:<25} {:<25}".format(0, 0, 0)
return s
# First 5 lines are comments
filelines = [ x.strip() for x in open(path).readlines() ]
curatom = None
for line in filelines:
matomre = atomre.match(line)
misore = isore.match(line)
matommass = atommassline.match(line)
if matomre:
curatom = "{:<5} {:<5}".format(matomre.group(1), matomre.group(2))
print("{} {:<6} {:<25}".format(curatom, matomre.group(4), NumberStr(matomre.group(5))))
elif misore:
print("{} {:<6} {:<25}".format(curatom, misore.group(1), NumberStr(misore.group(2))))
elif matommass:
curatom = "{:<5} {:<5}".format(matommass.group(1), matommass.group(2))
print("{} {:<25}".format(curatom, NumberStr(matommass.group(4))))
else:
print(line) # comment lines, etc
<|fim▁end|> | slow = mp.mpf(m.group(1))
shigh = mp.mpf(m.group(2))
smid = (shigh + slow)/mp.mpf("2.0")
s = "{:<25} {:<25} {:<25}".format(mp.nstr(smid, 18), mp.nstr(slow, 18), mp.nstr(shigh, 18)) |
<|file_name|>format_CIAAW.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
import re
import mpmath as mp
mp.dps=250
mp.mp.dps = 250
if len(sys.argv) != 2:
print("Usage: format_CIAAW.py ciaawfile")
quit(1)
path = sys.argv[1]
atomre = re.compile(r'^(\d+) +(\w\w*) +(\w+) +\[?(\d+)\]?\*? +(.*) *$')
isore = re.compile(r'^(\d+)\*? +(\[?\d.*.*\]?) *$')
brange = re.compile(r'^\[([\d\.]+),([\d\.]+)\].*$')
buncertain = re.compile(r'^([\d\.]+)\((\d+)\)[a-z]*$')
bnum = re.compile(r'^([\d\d]+)$')
atommassline = re.compile(r'^(\d+) +(\w\w*) +(\w+) +(.*) *$')
def NumberStr(n):
# Replace spaces
s = n.replace(' ', '')
# remove "exactly" for the carbon mass
s = s.replace('(exactly)', '')
# if only a number, put it three times
m = bnum.match(s)
if m:
s = "{:<25} {:<25} {:<25}".format(m.group(1), m.group(1), m.group(1))
# if parentheses uncertainty...
m = buncertain.match(s)
if m:
# tricky. duplicate the first part as a string
s2 = m.group(1)
# but replace with all zero
s2 = re.sub(r'\d', '0', s2)
# now replace last characters
l = len(m.group(2))
s2 = s2[:len(s2)-l] + m.group(2)
# convert to a float
serr = mp.mpf(s2)
scenter = mp.mpf(m.group(1))
s = "{:<25} {:<25} {:<25}".format(mp.nstr(scenter, 18), mp.nstr(scenter-serr, 18), mp.nstr(scenter+serr, 18))
# Replace bracketed ranges with parentheses
m = brange.match(s)
if m:
slow = mp.mpf(m.group(1))
shigh = mp.mpf(m.group(2))
smid = (shigh + slow)/mp.mpf("2.0")
s = "{:<25} {:<25} {:<25}".format(mp.nstr(smid, 18), mp.nstr(slow, 18), mp.nstr(shigh, 18))
# just a dash?
if s == "-":
<|fim_middle|>
return s
# First 5 lines are comments
filelines = [ x.strip() for x in open(path).readlines() ]
curatom = None
for line in filelines:
matomre = atomre.match(line)
misore = isore.match(line)
matommass = atommassline.match(line)
if matomre:
curatom = "{:<5} {:<5}".format(matomre.group(1), matomre.group(2))
print("{} {:<6} {:<25}".format(curatom, matomre.group(4), NumberStr(matomre.group(5))))
elif misore:
print("{} {:<6} {:<25}".format(curatom, misore.group(1), NumberStr(misore.group(2))))
elif matommass:
curatom = "{:<5} {:<5}".format(matommass.group(1), matommass.group(2))
print("{} {:<25}".format(curatom, NumberStr(matommass.group(4))))
else:
print(line) # comment lines, etc
<|fim▁end|> | s = "{:<25} {:<25} {:<25}".format(0, 0, 0) |
<|file_name|>format_CIAAW.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
import re
import mpmath as mp
mp.dps=250
mp.mp.dps = 250
if len(sys.argv) != 2:
print("Usage: format_CIAAW.py ciaawfile")
quit(1)
path = sys.argv[1]
atomre = re.compile(r'^(\d+) +(\w\w*) +(\w+) +\[?(\d+)\]?\*? +(.*) *$')
isore = re.compile(r'^(\d+)\*? +(\[?\d.*.*\]?) *$')
brange = re.compile(r'^\[([\d\.]+),([\d\.]+)\].*$')
buncertain = re.compile(r'^([\d\.]+)\((\d+)\)[a-z]*$')
bnum = re.compile(r'^([\d\d]+)$')
atommassline = re.compile(r'^(\d+) +(\w\w*) +(\w+) +(.*) *$')
def NumberStr(n):
# Replace spaces
s = n.replace(' ', '')
# remove "exactly" for the carbon mass
s = s.replace('(exactly)', '')
# if only a number, put it three times
m = bnum.match(s)
if m:
s = "{:<25} {:<25} {:<25}".format(m.group(1), m.group(1), m.group(1))
# if parentheses uncertainty...
m = buncertain.match(s)
if m:
# tricky. duplicate the first part as a string
s2 = m.group(1)
# but replace with all zero
s2 = re.sub(r'\d', '0', s2)
# now replace last characters
l = len(m.group(2))
s2 = s2[:len(s2)-l] + m.group(2)
# convert to a float
serr = mp.mpf(s2)
scenter = mp.mpf(m.group(1))
s = "{:<25} {:<25} {:<25}".format(mp.nstr(scenter, 18), mp.nstr(scenter-serr, 18), mp.nstr(scenter+serr, 18))
# Replace bracketed ranges with parentheses
m = brange.match(s)
if m:
slow = mp.mpf(m.group(1))
shigh = mp.mpf(m.group(2))
smid = (shigh + slow)/mp.mpf("2.0")
s = "{:<25} {:<25} {:<25}".format(mp.nstr(smid, 18), mp.nstr(slow, 18), mp.nstr(shigh, 18))
# just a dash?
if s == "-":
s = "{:<25} {:<25} {:<25}".format(0, 0, 0)
return s
# First 5 lines are comments
filelines = [ x.strip() for x in open(path).readlines() ]
curatom = None
for line in filelines:
matomre = atomre.match(line)
misore = isore.match(line)
matommass = atommassline.match(line)
if matomre:
<|fim_middle|>
elif misore:
print("{} {:<6} {:<25}".format(curatom, misore.group(1), NumberStr(misore.group(2))))
elif matommass:
curatom = "{:<5} {:<5}".format(matommass.group(1), matommass.group(2))
print("{} {:<25}".format(curatom, NumberStr(matommass.group(4))))
else:
print(line) # comment lines, etc
<|fim▁end|> | curatom = "{:<5} {:<5}".format(matomre.group(1), matomre.group(2))
print("{} {:<6} {:<25}".format(curatom, matomre.group(4), NumberStr(matomre.group(5)))) |
<|file_name|>format_CIAAW.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
import re
import mpmath as mp
mp.dps=250
mp.mp.dps = 250
if len(sys.argv) != 2:
print("Usage: format_CIAAW.py ciaawfile")
quit(1)
path = sys.argv[1]
atomre = re.compile(r'^(\d+) +(\w\w*) +(\w+) +\[?(\d+)\]?\*? +(.*) *$')
isore = re.compile(r'^(\d+)\*? +(\[?\d.*.*\]?) *$')
brange = re.compile(r'^\[([\d\.]+),([\d\.]+)\].*$')
buncertain = re.compile(r'^([\d\.]+)\((\d+)\)[a-z]*$')
bnum = re.compile(r'^([\d\d]+)$')
atommassline = re.compile(r'^(\d+) +(\w\w*) +(\w+) +(.*) *$')
def NumberStr(n):
# Replace spaces
s = n.replace(' ', '')
# remove "exactly" for the carbon mass
s = s.replace('(exactly)', '')
# if only a number, put it three times
m = bnum.match(s)
if m:
s = "{:<25} {:<25} {:<25}".format(m.group(1), m.group(1), m.group(1))
# if parentheses uncertainty...
m = buncertain.match(s)
if m:
# tricky. duplicate the first part as a string
s2 = m.group(1)
# but replace with all zero
s2 = re.sub(r'\d', '0', s2)
# now replace last characters
l = len(m.group(2))
s2 = s2[:len(s2)-l] + m.group(2)
# convert to a float
serr = mp.mpf(s2)
scenter = mp.mpf(m.group(1))
s = "{:<25} {:<25} {:<25}".format(mp.nstr(scenter, 18), mp.nstr(scenter-serr, 18), mp.nstr(scenter+serr, 18))
# Replace bracketed ranges with parentheses
m = brange.match(s)
if m:
slow = mp.mpf(m.group(1))
shigh = mp.mpf(m.group(2))
smid = (shigh + slow)/mp.mpf("2.0")
s = "{:<25} {:<25} {:<25}".format(mp.nstr(smid, 18), mp.nstr(slow, 18), mp.nstr(shigh, 18))
# just a dash?
if s == "-":
s = "{:<25} {:<25} {:<25}".format(0, 0, 0)
return s
# First 5 lines are comments
filelines = [ x.strip() for x in open(path).readlines() ]
curatom = None
for line in filelines:
matomre = atomre.match(line)
misore = isore.match(line)
matommass = atommassline.match(line)
if matomre:
curatom = "{:<5} {:<5}".format(matomre.group(1), matomre.group(2))
print("{} {:<6} {:<25}".format(curatom, matomre.group(4), NumberStr(matomre.group(5))))
elif misore:
<|fim_middle|>
elif matommass:
curatom = "{:<5} {:<5}".format(matommass.group(1), matommass.group(2))
print("{} {:<25}".format(curatom, NumberStr(matommass.group(4))))
else:
print(line) # comment lines, etc
<|fim▁end|> | print("{} {:<6} {:<25}".format(curatom, misore.group(1), NumberStr(misore.group(2)))) |
<|file_name|>format_CIAAW.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
import re
import mpmath as mp
mp.dps=250
mp.mp.dps = 250
if len(sys.argv) != 2:
print("Usage: format_CIAAW.py ciaawfile")
quit(1)
path = sys.argv[1]
atomre = re.compile(r'^(\d+) +(\w\w*) +(\w+) +\[?(\d+)\]?\*? +(.*) *$')
isore = re.compile(r'^(\d+)\*? +(\[?\d.*.*\]?) *$')
brange = re.compile(r'^\[([\d\.]+),([\d\.]+)\].*$')
buncertain = re.compile(r'^([\d\.]+)\((\d+)\)[a-z]*$')
bnum = re.compile(r'^([\d\d]+)$')
atommassline = re.compile(r'^(\d+) +(\w\w*) +(\w+) +(.*) *$')
def NumberStr(n):
# Replace spaces
s = n.replace(' ', '')
# remove "exactly" for the carbon mass
s = s.replace('(exactly)', '')
# if only a number, put it three times
m = bnum.match(s)
if m:
s = "{:<25} {:<25} {:<25}".format(m.group(1), m.group(1), m.group(1))
# if parentheses uncertainty...
m = buncertain.match(s)
if m:
# tricky. duplicate the first part as a string
s2 = m.group(1)
# but replace with all zero
s2 = re.sub(r'\d', '0', s2)
# now replace last characters
l = len(m.group(2))
s2 = s2[:len(s2)-l] + m.group(2)
# convert to a float
serr = mp.mpf(s2)
scenter = mp.mpf(m.group(1))
s = "{:<25} {:<25} {:<25}".format(mp.nstr(scenter, 18), mp.nstr(scenter-serr, 18), mp.nstr(scenter+serr, 18))
# Replace bracketed ranges with parentheses
m = brange.match(s)
if m:
slow = mp.mpf(m.group(1))
shigh = mp.mpf(m.group(2))
smid = (shigh + slow)/mp.mpf("2.0")
s = "{:<25} {:<25} {:<25}".format(mp.nstr(smid, 18), mp.nstr(slow, 18), mp.nstr(shigh, 18))
# just a dash?
if s == "-":
s = "{:<25} {:<25} {:<25}".format(0, 0, 0)
return s
# First 5 lines are comments
filelines = [ x.strip() for x in open(path).readlines() ]
curatom = None
for line in filelines:
matomre = atomre.match(line)
misore = isore.match(line)
matommass = atommassline.match(line)
if matomre:
curatom = "{:<5} {:<5}".format(matomre.group(1), matomre.group(2))
print("{} {:<6} {:<25}".format(curatom, matomre.group(4), NumberStr(matomre.group(5))))
elif misore:
print("{} {:<6} {:<25}".format(curatom, misore.group(1), NumberStr(misore.group(2))))
elif matommass:
<|fim_middle|>
else:
print(line) # comment lines, etc
<|fim▁end|> | curatom = "{:<5} {:<5}".format(matommass.group(1), matommass.group(2))
print("{} {:<25}".format(curatom, NumberStr(matommass.group(4)))) |
<|file_name|>format_CIAAW.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
import re
import mpmath as mp
mp.dps=250
mp.mp.dps = 250
if len(sys.argv) != 2:
print("Usage: format_CIAAW.py ciaawfile")
quit(1)
path = sys.argv[1]
atomre = re.compile(r'^(\d+) +(\w\w*) +(\w+) +\[?(\d+)\]?\*? +(.*) *$')
isore = re.compile(r'^(\d+)\*? +(\[?\d.*.*\]?) *$')
brange = re.compile(r'^\[([\d\.]+),([\d\.]+)\].*$')
buncertain = re.compile(r'^([\d\.]+)\((\d+)\)[a-z]*$')
bnum = re.compile(r'^([\d\d]+)$')
atommassline = re.compile(r'^(\d+) +(\w\w*) +(\w+) +(.*) *$')
def NumberStr(n):
# Replace spaces
s = n.replace(' ', '')
# remove "exactly" for the carbon mass
s = s.replace('(exactly)', '')
# if only a number, put it three times
m = bnum.match(s)
if m:
s = "{:<25} {:<25} {:<25}".format(m.group(1), m.group(1), m.group(1))
# if parentheses uncertainty...
m = buncertain.match(s)
if m:
# tricky. duplicate the first part as a string
s2 = m.group(1)
# but replace with all zero
s2 = re.sub(r'\d', '0', s2)
# now replace last characters
l = len(m.group(2))
s2 = s2[:len(s2)-l] + m.group(2)
# convert to a float
serr = mp.mpf(s2)
scenter = mp.mpf(m.group(1))
s = "{:<25} {:<25} {:<25}".format(mp.nstr(scenter, 18), mp.nstr(scenter-serr, 18), mp.nstr(scenter+serr, 18))
# Replace bracketed ranges with parentheses
m = brange.match(s)
if m:
slow = mp.mpf(m.group(1))
shigh = mp.mpf(m.group(2))
smid = (shigh + slow)/mp.mpf("2.0")
s = "{:<25} {:<25} {:<25}".format(mp.nstr(smid, 18), mp.nstr(slow, 18), mp.nstr(shigh, 18))
# just a dash?
if s == "-":
s = "{:<25} {:<25} {:<25}".format(0, 0, 0)
return s
# First 5 lines are comments
filelines = [ x.strip() for x in open(path).readlines() ]
curatom = None
for line in filelines:
matomre = atomre.match(line)
misore = isore.match(line)
matommass = atommassline.match(line)
if matomre:
curatom = "{:<5} {:<5}".format(matomre.group(1), matomre.group(2))
print("{} {:<6} {:<25}".format(curatom, matomre.group(4), NumberStr(matomre.group(5))))
elif misore:
print("{} {:<6} {:<25}".format(curatom, misore.group(1), NumberStr(misore.group(2))))
elif matommass:
curatom = "{:<5} {:<5}".format(matommass.group(1), matommass.group(2))
print("{} {:<25}".format(curatom, NumberStr(matommass.group(4))))
else:
<|fim_middle|>
<|fim▁end|> | print(line) # comment lines, etc |
<|file_name|>format_CIAAW.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
import re
import mpmath as mp
mp.dps=250
mp.mp.dps = 250
if len(sys.argv) != 2:
print("Usage: format_CIAAW.py ciaawfile")
quit(1)
path = sys.argv[1]
atomre = re.compile(r'^(\d+) +(\w\w*) +(\w+) +\[?(\d+)\]?\*? +(.*) *$')
isore = re.compile(r'^(\d+)\*? +(\[?\d.*.*\]?) *$')
brange = re.compile(r'^\[([\d\.]+),([\d\.]+)\].*$')
buncertain = re.compile(r'^([\d\.]+)\((\d+)\)[a-z]*$')
bnum = re.compile(r'^([\d\d]+)$')
atommassline = re.compile(r'^(\d+) +(\w\w*) +(\w+) +(.*) *$')
def <|fim_middle|>(n):
# Replace spaces
s = n.replace(' ', '')
# remove "exactly" for the carbon mass
s = s.replace('(exactly)', '')
# if only a number, put it three times
m = bnum.match(s)
if m:
s = "{:<25} {:<25} {:<25}".format(m.group(1), m.group(1), m.group(1))
# if parentheses uncertainty...
m = buncertain.match(s)
if m:
# tricky. duplicate the first part as a string
s2 = m.group(1)
# but replace with all zero
s2 = re.sub(r'\d', '0', s2)
# now replace last characters
l = len(m.group(2))
s2 = s2[:len(s2)-l] + m.group(2)
# convert to a float
serr = mp.mpf(s2)
scenter = mp.mpf(m.group(1))
s = "{:<25} {:<25} {:<25}".format(mp.nstr(scenter, 18), mp.nstr(scenter-serr, 18), mp.nstr(scenter+serr, 18))
# Replace bracketed ranges with parentheses
m = brange.match(s)
if m:
slow = mp.mpf(m.group(1))
shigh = mp.mpf(m.group(2))
smid = (shigh + slow)/mp.mpf("2.0")
s = "{:<25} {:<25} {:<25}".format(mp.nstr(smid, 18), mp.nstr(slow, 18), mp.nstr(shigh, 18))
# just a dash?
if s == "-":
s = "{:<25} {:<25} {:<25}".format(0, 0, 0)
return s
# First 5 lines are comments
filelines = [ x.strip() for x in open(path).readlines() ]
curatom = None
for line in filelines:
matomre = atomre.match(line)
misore = isore.match(line)
matommass = atommassline.match(line)
if matomre:
curatom = "{:<5} {:<5}".format(matomre.group(1), matomre.group(2))
print("{} {:<6} {:<25}".format(curatom, matomre.group(4), NumberStr(matomre.group(5))))
elif misore:
print("{} {:<6} {:<25}".format(curatom, misore.group(1), NumberStr(misore.group(2))))
elif matommass:
curatom = "{:<5} {:<5}".format(matommass.group(1), matommass.group(2))
print("{} {:<25}".format(curatom, NumberStr(matommass.group(4))))
else:
print(line) # comment lines, etc
<|fim▁end|> | NumberStr |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)<|fim▁hole|> This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)<|fim▁end|> |
def run_test(self, input_code):
"""Run the actual test
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
<|fim_middle|>
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | """Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code)) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
<|fim_middle|>
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code)) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
<|fim_middle|>
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | """Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
<|fim_middle|>
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | def func(cls):
return cls.run_test(code)
return func |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
<|fim_middle|>
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | return cls.run_test(code) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
<|fim_middle|>
<|fim▁end|> | """Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
<|fim_middle|>
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS) |
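
For context on how the pieces above are meant to be combined, here is a minimal sketch of a concrete test case that wires BaseTestCase to the MetaTest metaclass. It is illustrative only: the import path, the version constant and the cli_options value are assumptions, not taken from this file.

# Illustrative sketch only; the import path, version constant and CLI flag
# below are assumptions rather than code from this repository.
import unittest

import cangjie
from tests import BaseTestCase, MetaTest  # assumed package layout


class Cangjie3TestCase(BaseTestCase, metaclass=MetaTest):
    # BaseTestCase.__init__ and setUp expect these attributes on the subclass.
    version = cangjie.versions.CANGJIE3  # assumed constant name
    cli_options = ["--cj3"]              # hypothetical CLI flag


if __name__ == "__main__":
    unittest.main()
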
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
<|fim_middle|>
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | self.cj = cangjie.Cangjie(self.version, self.language) |
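
The object created in setUp above is the same wrapper that run_test later queries. A rough interactive usage sketch, with the version constant assumed rather than taken from this file:

# Rough usage sketch; cangjie.versions.CANGJIE3 is an assumed constant name.
import cangjie

cj = cangjie.Cangjie(cangjie.versions.CANGJIE3, cangjie.filters.BIG5)
for ch in cj.get_characters("a"):
    print(ch.chchar, ch.code)
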
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
<|fim_middle|>
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | del self.cj |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
<|fim_middle|>
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | """Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2) |
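
The fallback above silently drops any output line that fails to decode. A shorter alternative, shown here only as a design comparison and not what run_command does, is Python's lenient error handler, which keeps every line and replaces the offending bytes instead:

# Design comparison only -- not the behaviour of run_command above.
def lenient_decode(raw: bytes) -> str:
    # errors="replace" substitutes U+FFFD for byte sequences that are not
    # valid UTF-8, instead of discarding the whole line.
    return raw.decode("utf-8", errors="replace")

print(lenient_decode(b"abc\xed\xa1\x9ddef"))  # the invalid bytes show up as U+FFFD
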
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
<|fim_middle|>
<|fim▁end|> | """Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0) |
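
The parsing in run_test above implies a particular shape for each libcangjie_cli output line. The sample line below is a guess reconstructed from the split() and strip() calls, shown only to make the parsing steps concrete; the tool's real output format is not documented in this file.

# Hypothetical libcangjie_cli output line, inferred from the parsing above.
line = "chchar: '日', simpchar: '日', code: 'a', frequency: 10"

chchar, simpchar, code, frequency = line.split(", ")
chchar = chchar.split(": ")[-1].strip("'")      # '日'
simpchar = simpchar.split(": ")[-1].strip("'")  # '日'
code = code.split(": ")[-1].strip("'")          # 'a'
frequency = int(frequency.split(" ")[-1])       # 10
print(chchar, simpchar, code, frequency)
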
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def <|fim_middle|>(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | __init__ |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def <|fim_middle|>():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | gen_codes |
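
As a quick check of the figure quoted in the MetaTest docstring, 26 one-letter codes plus 26^2 two-letter wildcard codes do come to 702; the snippet below reproduces the generator logic standalone.

# Standalone check of the 702 figure from the MetaTest docstring.
import itertools
import string

codes = list(string.ascii_lowercase) + [
    "*".join(t) for t in itertools.product(string.ascii_lowercase, repeat=2)
]
assert len(codes) == 26 + 26 ** 2 == 702
print(len(codes))  # 702
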
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def <|fim_middle|>(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | tester |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def <|fim_middle|>(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | func |
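
One detail worth noting in the code above: routing the loop variable through the tester() factory is what ties each generated test_* method to its own code. A closure defined directly inside the loop would late-bind, and every method would test the last code generated. A minimal standalone illustration of the difference:

# Why the tester() factory above is needed: late binding vs. a factory.
def make_late_bound(codes):
    # Anti-pattern: every closure ends up seeing the final value of `code`.
    return [lambda: code for code in codes]

def make_bound(codes):
    # Same trick as tester() above: the factory freezes `code` per call.
    def factory(code):
        return lambda: code
    return [factory(code) for code in codes]

codes = ["a", "b", "c"]
print([f() for f in make_late_bound(codes)])  # ['c', 'c', 'c']
print([f() for f in make_bound(codes)])       # ['a', 'b', 'c']
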
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def <|fim_middle|>(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | __init__ |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def <|fim_middle|>(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | setUp |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
    laptop has 8GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def <|fim_middle|>(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | tearDown |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8 GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def <|fim_middle|>(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here;
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | run_command |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8 GB of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def <|fim_middle|>(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here;
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
<|fim▁end|> | run_test |
<|file_name|>suspend.py<|end_file_name|><|fim▁begin|>from verbs.baseforms import forms
class SuspendForm(forms.VerbForm):
name = "Suspend"
slug = "suspend"
<|fim▁hole|><|fim▁end|> | duration_min_time = forms.IntegerField() |
<|file_name|>suspend.py<|end_file_name|><|fim▁begin|>from verbs.baseforms import forms
class SuspendForm(forms.VerbForm):
<|fim_middle|>
<|fim▁end|> | name = "Suspend"
slug = "suspend"
duration_min_time = forms.IntegerField() |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.<|fim▁hole|> :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)<|fim▁end|> | :type resource_id: str |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
<|fim_middle|>
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | """
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
<|fim_middle|>
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs) |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
<|fim_middle|>
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing") |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
<|fim_middle|>
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | pass |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
<|fim_middle|>
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | """
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id) |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
<|fim_middle|>
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs) |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
<|fim_middle|>
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | return self._hook.start_instance(self.project_id, self.zone, self.resource_id) |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
<|fim_middle|>
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | """
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id) |
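The two completions above fill in the execute body for the start operator and the full class body for the stop operator. For orientation only, here is a minimal usage sketch of how such operators are typically wired into a DAG; it is not part of the dataset, it assumes Airflow 1.10-style contrib imports and an existing 'google_cloud_default' connection, and every project, zone, and instance value is a placeholder.

# Hypothetical usage sketch (placeholder values throughout); not taken from the dataset rows above.
from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.gcp_compute_operator import (
    GceInstanceStartOperator,
    GceInstanceStopOperator,
)

with DAG(dag_id='example_gce_start_stop',
         start_date=datetime(2018, 1, 1),
         schedule_interval=None) as dag:
    # Start the instance, then stop it once the start task has succeeded.
    gce_start = GceInstanceStartOperator(
        task_id='gce_instance_start',
        project_id='my-project',        # placeholder GCP project
        zone='europe-west1-b',          # placeholder zone
        resource_id='my-instance',      # placeholder instance name
    )
    gce_stop = GceInstanceStopOperator(
        task_id='gce_instance_stop',
        project_id='my-project',
        zone='europe-west1-b',
        resource_id='my-instance',
    )
    gce_start >> gce_stop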
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
<|fim_middle|>
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs) |
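Several rows in this file exercise _validate_inputs, which runs inside __init__, so a missing required argument surfaces at DAG parse time rather than when the task executes. A small illustrative test of that behaviour follows (hypothetical code, not part of the dataset; the zone and instance names are placeholders).

# Hypothetical test sketch: GceBaseOperator subclasses validate required arguments on construction.
import pytest

from airflow import AirflowException
from airflow.contrib.operators.gcp_compute_operator import GceInstanceStartOperator

def test_missing_project_id_raises_at_construction():
    # An empty project_id is falsy, so _validate_inputs raises before the hook is ever used.
    with pytest.raises(AirflowException, match="'project_id' is missing"):
        GceInstanceStartOperator(
            task_id='gce_instance_start',
            project_id='',
            zone='europe-west1-b',      # placeholder zone
            resource_id='my-instance',  # placeholder instance name
        )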
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
<|fim_middle|>
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | return self._hook.stop_instance(self.project_id, self.zone, self.resource_id) |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
<|fim_middle|>
<|fim▁end|> | """
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body) |
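The completion above supplies the GceSetMachineTypeOperator docstring and constructor. A hedged example of building the setMachineType request body and task follows; the machineType URL format mirrors the Compute Engine documentation linked in the docstring, but every concrete value is a placeholder.

# Hypothetical usage sketch for GceSetMachineTypeOperator; placeholder identifiers throughout.
from airflow.contrib.operators.gcp_compute_operator import GceSetMachineTypeOperator

SET_MACHINE_TYPE_BODY = {
    # Partial URL of the target machine type, relative to the instance's zone.
    'machineType': 'zones/europe-west1-b/machineTypes/n1-standard-2',
}

gce_set_machine_type = GceSetMachineTypeOperator(
    task_id='gce_set_machine_type',
    project_id='my-project',
    zone='europe-west1-b',
    resource_id='my-instance',
    body=SET_MACHINE_TYPE_BODY,
    validate_body=True,  # runs GcpBodyFieldValidator against SET_MACHINE_TYPE_VALIDATION_SPECIFICATION
)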
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
<|fim_middle|>
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs) |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
<|fim_middle|>
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | if self._field_validator:
self._field_validator.validate(self.body) |
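The two completed lines above delegate body validation to GcpBodyFieldValidator. A standalone sketch of that call pattern, using only the calls visible in the snippet (illustrative; the exact exception raised for an invalid body is not shown in these rows):

# Illustrative only: exercising the validator the same way GceSetMachineTypeOperator does.
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator

SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
    dict(name="machineType", regexp="^.+$"),
]

validator = GcpBodyFieldValidator(SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version='v1')
validator.validate({'machineType': 'zones/europe-west1-b/machineTypes/n1-standard-2'})  # passes
# validator.validate({})  # a body without 'machineType' would be rejected by the validator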
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
<|fim_middle|>
<|fim▁end|> | self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body) |
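Because project_id, zone, and resource_id appear in template_fields on every operator in this file, they can be Jinja-templated and resolved at run time. A brief hedged sketch, with placeholder Airflow Variable names:

# Hypothetical sketch: template_fields lets these arguments be rendered from Jinja at runtime.
from airflow.contrib.operators.gcp_compute_operator import GceInstanceStartOperator

gce_start_templated = GceInstanceStartOperator(
    task_id='gce_instance_start_templated',
    project_id='{{ var.value.gce_project_id }}',  # resolved from an Airflow Variable at run time
    zone='{{ var.value.gce_zone }}',              # placeholder Variable names
    resource_id='{{ var.value.gce_instance }}',
)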
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
<|fim_middle|>
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | raise AirflowException("The required parameter 'project_id' is missing") |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
<|fim_middle|>
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | raise AirflowException("The required parameter 'zone' is missing") |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
<|fim_middle|>
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | raise AirflowException("The required parameter 'resource_id' is missing") |
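The completion above fills in the argument check for `resource_id`. Because `GceBaseOperator` calls `_validate_inputs()` inside `__init__`, a missing or empty required argument fails when the task is constructed rather than when it runs. A small sketch of that behaviour, with made-up values and the same assumed import path:

```python
# Sketch: constructing an operator without a required argument raises immediately,
# because _validate_inputs() runs inside GceBaseOperator.__init__.
# task_id and all GCP values are hypothetical; the import path is assumed.
from airflow import AirflowException
from airflow.contrib.operators.gcp_compute_operator import GceInstanceStartOperator

try:
    GceInstanceStartOperator(
        task_id='bad_task',
        project_id='my-project',   # hypothetical
        zone='',                   # empty zone trips the validation check
        resource_id='my-instance',
    )
except AirflowException as err:
    print(err)  # -> The required parameter 'zone' is missing
```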
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
<|fim_middle|>
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version) |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
<|fim_middle|>
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | self._field_validator.validate(self.body) |
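This row's hole covers the call that applies `GcpBodyFieldValidator` to the request body. The validation spec only requires a non-empty `machineType` string; the body itself follows the Compute Engine setMachineType request format linked in the docstring. A hedged usage sketch, with an illustrative zone-relative machine type URL and made-up ids:

```python
# Illustrative only: a setMachineType request body and the operator that sends it.
# The machine type URL and all ids are examples, not values from the dataset;
# the import path is assumed from the file name tag.
from airflow.contrib.operators.gcp_compute_operator import GceSetMachineTypeOperator

SET_MACHINE_TYPE_BODY = {
    'machineType': 'zones/europe-west1-b/machineTypes/n1-standard-2'
}

set_machine_type = GceSetMachineTypeOperator(
    task_id='gce_set_machine_type',
    project_id='my-project',      # hypothetical
    zone='europe-west1-b',        # hypothetical
    resource_id='my-instance',    # hypothetical
    body=SET_MACHINE_TYPE_BODY,
    validate_body=True,           # runs GcpBodyFieldValidator against the spec above
)
```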
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def <|fim_middle|>(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | __init__ |
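Each row in this dump is a prompt/completion pair: the prompt is the module with a `<|fim_middle|>` hole (here, the `__init__` name), and the completion is the text that fills it. A small sketch of reassembling such a row into plain source, assuming the markers shown above and a `' | '` column separator, which is how the viewer appears to have rendered the rows:

```python
# Sketch: reconstruct the original file from one prompt/completion row.
# Marker names follow the tags visible in this dump; the ' | ' column separator
# and the trailing '|' are assumptions about how the viewer rendered each row.
def reassemble(row: str) -> str:
    prompt, _, completion = row.partition('<|fim▁end|>')
    completion = completion.strip().strip('|').strip()
    prompt = prompt.split('<|fim▁begin|>', 1)[1]          # drop the file-name header
    prefix, _, suffix = prompt.partition('<|fim_middle|>')
    return prefix + completion + suffix

example_row = (
    "<|file_name|>demo.py<|end_file_name|><|fim▁begin|>def add(a, b):\n"
    "    return <|fim_middle|>\n"
    "<|fim▁end|> | a + b |"
)
print(reassemble(example_row))  # prints the completed two-line function
```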
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def <|fim_middle|>(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | _validate_inputs |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def <|fim_middle|>(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | execute |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def <|fim_middle|>(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | __init__ |
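Since `execute` only forwards to `GceHook`, rows like this one can be checked with a unit test that patches the hook instead of calling GCP. A sketch of that test; the patch target assumes the module path implied by the file-name tag, and all ids are illustrative:

```python
# Sketch: unit-testing GceInstanceStartOperator.execute with the hook mocked out.
# The patch target assumes the module lives at
# airflow/contrib/operators/gcp_compute_operator.py, as the file name tag suggests.
import unittest
from unittest import mock

from airflow.contrib.operators.gcp_compute_operator import GceInstanceStartOperator


class GceInstanceStartOperatorTest(unittest.TestCase):
    @mock.patch('airflow.contrib.operators.gcp_compute_operator.GceHook')
    def test_execute_calls_start_instance(self, mock_hook):
        op = GceInstanceStartOperator(
            task_id='gce_start',
            project_id='my-project',    # hypothetical
            zone='europe-west1-b',      # hypothetical
            resource_id='my-instance',  # hypothetical
        )
        op.execute(None)
        # The hook is built in __init__ with the connection id and API version...
        mock_hook.assert_called_once_with(gcp_conn_id='google_cloud_default',
                                          api_version='v1')
        # ...and execute delegates straight to start_instance.
        mock_hook.return_value.start_instance.assert_called_once_with(
            'my-project', 'europe-west1-b', 'my-instance')


if __name__ == '__main__':
    unittest.main()
```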
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def <|fim_middle|>(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | execute |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def <|fim_middle|>(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | __init__ |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def <|fim_middle|>(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | execute |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def <|fim_middle|>(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | __init__ |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def <|fim_middle|>(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | _validate_all_body_fields |
<|file_name|>gcp_compute_operator.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.full_location = 'projects/{}/zones/{}'.format(self.project_id,
self.zone)
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super(GceBaseOperator, self).__init__(*args, **kwargs)
def _validate_inputs(self):
if not self.project_id:
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Start an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStartOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(self.project_id, self.zone, self.resource_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stop an instance in Google Compute Engine.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super(GceInstanceStopOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.stop_instance(self.project_id, self.zone, self.resource_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
:param project_id: Google Cloud Platform project where the Compute Engine
instance exists.
:type project_id: str
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
:type gcp_conn_id: str
:param api_version: API version used (e.g. v1).
:type api_version: str
"""
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
@apply_defaults
def __init__(self,
project_id,
zone,
resource_id,
body,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super(GceSetMachineTypeOperator, self).__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def <|fim_middle|>(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(self.project_id, self.zone,
self.resource_id, self.body)
<|fim▁end|> | execute |
<|file_name|>specialist_pool.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1", manifest={"SpecialistPool",},
)
class SpecialistPool(proto.Message):
r"""SpecialistPool represents customers' own workforce to work on
their data labeling jobs. It includes a group of specialist
managers and workers. Managers are responsible for managing the
workers in this pool as well as customers' data labeling jobs
    associated with this pool. Customers create a specialist pool as<|fim▁hole|> well as start data labeling jobs on Cloud; managers and workers
    handle the jobs using the CrowdCompute console.
Attributes:
name (str):
Required. The resource name of the
SpecialistPool.
display_name (str):
Required. The user-defined name of the
SpecialistPool. The name can be up to 128
            characters long and can consist of any UTF-8
characters.
This field should be unique on project-level.
specialist_managers_count (int):
Output only. The number of managers in this
SpecialistPool.
specialist_manager_emails (Sequence[str]):
The email addresses of the managers in the
SpecialistPool.
pending_data_labeling_jobs (Sequence[str]):
Output only. The resource name of the pending
data labeling jobs.
specialist_worker_emails (Sequence[str]):
The email addresses of workers in the
SpecialistPool.
"""
name = proto.Field(proto.STRING, number=1,)
display_name = proto.Field(proto.STRING, number=2,)
specialist_managers_count = proto.Field(proto.INT32, number=3,)
specialist_manager_emails = proto.RepeatedField(proto.STRING, number=4,)
pending_data_labeling_jobs = proto.RepeatedField(proto.STRING, number=5,)
specialist_worker_emails = proto.RepeatedField(proto.STRING, number=7,)
__all__ = tuple(sorted(__protobuf__.manifest))<|fim▁end|> | |
<|file_name|>specialist_pool.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1", manifest={"SpecialistPool",},
)
class SpecialistPool(proto.Message):
<|fim_middle|>
__all__ = tuple(sorted(__protobuf__.manifest))
<|fim▁end|> | r"""SpecialistPool represents customers' own workforce to work on
their data labeling jobs. It includes a group of specialist
managers and workers. Managers are responsible for managing the
workers in this pool as well as customers' data labeling jobs
    associated with this pool. Customers create a specialist pool as
    well as start data labeling jobs on Cloud; managers and workers
    handle the jobs using the CrowdCompute console.
Attributes:
name (str):
Required. The resource name of the
SpecialistPool.
display_name (str):
Required. The user-defined name of the
SpecialistPool. The name can be up to 128
            characters long and can consist of any UTF-8
characters.
This field should be unique on project-level.
specialist_managers_count (int):
Output only. The number of managers in this
SpecialistPool.
specialist_manager_emails (Sequence[str]):
The email addresses of the managers in the
SpecialistPool.
pending_data_labeling_jobs (Sequence[str]):
Output only. The resource name of the pending
data labeling jobs.
specialist_worker_emails (Sequence[str]):
The email addresses of workers in the
SpecialistPool.
"""
name = proto.Field(proto.STRING, number=1,)
display_name = proto.Field(proto.STRING, number=2,)
specialist_managers_count = proto.Field(proto.INT32, number=3,)
specialist_manager_emails = proto.RepeatedField(proto.STRING, number=4,)
pending_data_labeling_jobs = proto.RepeatedField(proto.STRING, number=5,)
specialist_worker_emails = proto.RepeatedField(proto.STRING, number=7,) |
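# --- Illustrative sketch (editor addition; not part of the dataset rows) ---
# The two rows above dump the SpecialistPool proto-plus message. A minimal,
# hedged example of constructing it, assuming the standard generated package
# layout (google.cloud.aiplatform_v1) and made-up resource names:
from google.cloud.aiplatform_v1.types import SpecialistPool

pool = SpecialistPool(
    name='projects/my-project/locations/us-central1/specialistPools/123',
    display_name='my-labeling-pool',
)
# Repeated string fields such as specialist_manager_emails behave like Python lists.
pool.specialist_manager_emails.append('manager@example.com')
print(pool.display_name, len(pool.specialist_manager_emails))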
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Util classes
------------
Classes which represent data types useful for the package pySpatialTools.
"""
## Spatial elements collectors
from spatialelements import SpatialElementsCollection, Locations<|fim▁hole|>
## Membership relations
from Membership import Membership<|fim▁end|> | |
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
scores.close()
print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
"""
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, folder):<|fim▁hole|> :param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file)
def load_dt_model(pickle_model):
"""
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model)<|fim▁end|> | """
Save model using Pickle binary format.
|
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
<|fim_middle|>
def plot_dt(model_name, y_actual, y_test, output_folder):
"""
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, folder):
"""
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file)
def load_dt_model(pickle_model):
"""
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model)
<|fim▁end|> | """
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
scores.close()
print("Scores saved location:", filename) |
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
scores.close()
print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
<|fim_middle|>
def save_dt_model(model_name, model, folder):
"""
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file)
def load_dt_model(pickle_model):
"""
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model)
<|fim▁end|> | """
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path) |
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
scores.close()
print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
"""
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, folder):
<|fim_middle|>
def load_dt_model(pickle_model):
"""
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model)
<|fim▁end|> | """
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file) |
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
scores.close()
print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
"""
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, folder):
"""
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file)
def load_dt_model(pickle_model):
<|fim_middle|>
<|fim▁end|> | """
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model) |
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def <|fim_middle|>(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
scores.close()
print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
"""
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, folder):
"""
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file)
def load_dt_model(pickle_model):
"""
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model)
<|fim▁end|> | score_dt |
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
scores.close()
print("Scores saved location:", filename)
def <|fim_middle|>(model_name, y_actual, y_test, output_folder):
"""
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, folder):
"""
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file)
def load_dt_model(pickle_model):
"""
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model)
<|fim▁end|> | plot_dt |
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
scores.close()
print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
"""
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path)
def <|fim_middle|>(model_name, model, folder):
"""
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file)
def load_dt_model(pickle_model):
"""
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model)
<|fim▁end|> | save_dt_model |
<|file_name|>DT_Utils.py<|end_file_name|><|fim▁begin|># Copyright 2017 Priscilla Boyd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The DT_Utils module provides helper functions for Decision Tree algorithms implementation, model creation and
analysis.
"""
import pickle
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from tools.Utils import create_folder_if_not_exists
# noinspection PyTypeChecker
def score_dt(model_name, model, X, y, y_actual, output_folder):
"""
Score a decision tree model.
:param string model_name: title for the model used on the output filename
:param dataframe model: model reference
:param dataframe X: examples
:param dataframe y: targets
:param dataframe y_actual: target results
:param string output_folder: location of the output / results
"""
print("Scoring model...")
model_score = model.score(X, y)
mse = mean_squared_error(y, y_actual)
mse_score = model_name, "- Mean Squared Error:", mse
accuracy = model_name, "- Accuracy score (%):", "{:.2%}".format(model_score)
# write to file
path = output_folder + '/models'
create_folder_if_not_exists(path)
filename = path + '/score_' + model_name + '.txt'
with open(filename, 'w') as scores:
print(mse_score, file=scores)
print(accuracy, file=scores)
scores.close()
print("Scores saved location:", filename)
def plot_dt(model_name, y_actual, y_test, output_folder):
"""
Plot decision tree, y (training) vs y (test/actual).
:param string model_name: title for the model used on the output filename
:param dataframe y_actual: target results
:param dataframe y_test: test targets
:param string output_folder: location of the output / results
"""
# initialise plot path
path = output_folder + '/models'
print("Plotting results...")
plt.scatter(y_actual, y_test, label='Duration')
plt.title('Decision Tree')
plt.plot([0, 1], [0, 1], '--k', transform=plt.gca().transAxes)
plt.xlabel('y (actual)')
plt.ylabel('y (test)')
plt.legend()
plot_path = path + '/plot_' + model_name + '.png'
plt.savefig(plot_path)
print("Plot saved location:", plot_path)
def save_dt_model(model_name, model, folder):
"""
Save model using Pickle binary format.
:param dataframe model: model reference
:param string model_name: title for the model used on the output filename
:param string folder: location of model output
"""
print("Saving model...")
model_file = folder + '/models/' + model_name + '.pkl'
path = open(model_file, 'wb')
pickle.dump(model, path)
print("Model saved location:", model_file)
def <|fim_middle|>(pickle_model):
"""
Retrieve model using Pickle binary format.
:param string pickle_model: location of Pickle model
:return: Pickle model for re-use
:rtype: object
"""
return pickle.loads(pickle_model)
<|fim▁end|> | load_dt_model |
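# --- Illustrative usage sketch (editor addition; not part of the dataset rows) ---
# The DT_Utils rows above define score_dt, plot_dt, save_dt_model and load_dt_model.
# A hedged end-to-end example of how they could be combined; the regressor, the
# synthetic data and the 'output' folder are assumptions, and it presumes the
# DT_Utils module (and its tools.Utils dependency) is importable:
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor

from DT_Utils import plot_dt, save_dt_model, score_dt

X, y = make_regression(n_samples=200, n_features=5, noise=0.1, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

model = DecisionTreeRegressor(max_depth=4).fit(X_train, y_train)
y_pred = model.predict(X_test)

# score_dt creates '<output>/models' and writes the accuracy/MSE report there;
# plot_dt saves the prediction-vs-target scatter plot; save_dt_model pickles the model.
score_dt('dt_depth4', model, X_test, y_test, y_pred, 'output')
plot_dt('dt_depth4', y_pred, y_test, 'output')
save_dt_model('dt_depth4', model, 'output')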
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"<|fim▁hole|> driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()<|fim▁end|> | |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
<|fim_middle|>
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0) |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
<|fim_middle|>
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address() |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
<|fim_middle|>
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0) |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
<|fim_middle|>
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert() |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
<|fim_middle|>
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert() |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
<|fim_middle|>
<|fim▁end|> | if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert() |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
<|fim_middle|>
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0) |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
<|fim_middle|>
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert() |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
<|fim_middle|>
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert() |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
<|fim_middle|>
<|fim▁end|> | vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert() |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def <|fim_middle|>(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | setUp |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def <|fim_middle|>(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | test_delivery_trip |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def <|fim_middle|>():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | create_driver |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def <|fim_middle|>():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def create_vehicle():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | create_delivery_notfication |
<|file_name|>test_delivery_trip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
from frappe.utils import nowdate, add_days
from erpnext.tests.utils import create_test_contact_and_address
from erpnext.stock.doctype.delivery_trip.delivery_trip import notify_customers, get_contact_and_address
class TestDeliveryTrip(unittest.TestCase):
def setUp(self):
create_driver()
create_vehicle()
create_delivery_notfication()
create_test_contact_and_address()
def test_delivery_trip(self):
contact = get_contact_and_address("_Test Customer")
if not frappe.db.exists("Delivery Trip", "TOUR-00000"):
delivery_trip = frappe.new_doc("Delivery Trip")
delivery_trip.company = erpnext.get_default_company()
delivery_trip.date = add_days(nowdate(), 5)
delivery_trip.driver = "DRIVER-00001"
delivery_trip.vehicle = "JB 007"
delivery_trip.append("delivery_stops", {
"customer": "_Test Customer",
"address": contact.shipping_address.parent,
"contact": contact.contact_person.parent
})
delivery_trip.delivery_notification = 'Delivery Notification'
delivery_trip.insert()
sender_email = frappe.db.get_value("User", frappe.session.user, "email")
notify_customers(docname=delivery_trip.name, date=delivery_trip.date, driver=delivery_trip.driver,
vehicle=delivery_trip.vehicle,
sender_email=sender_email, delivery_notification=delivery_trip.delivery_notification)
self.assertEquals(delivery_trip.get("delivery_stops")[0].notified_by_email, 0)
def create_driver():
if not frappe.db.exists("Driver", "Newton Scmander"):
driver = frappe.new_doc("Driver")
driver.full_name = "Newton Scmander"
driver.cell_number = "98343424242"
driver.license_number = "B809"
driver.insert()
def create_delivery_notfication():
if not frappe.db.exists("Standard Reply", "Delivery Notification"):
frappe.get_doc({
'doctype': 'Standard Reply',
'name': 'Delivery Notification',
'response': 'Test Delivery Trip',
'subject': 'Test Subject',
'owner': frappe.session.user
}).insert()
def <|fim_middle|>():
if not frappe.db.exists("Vehicle", "JB 007"):
vehicle = frappe.get_doc({
"doctype": "Vehicle",
"license_plate": "JB 007",
"make": "Maruti",
"model": "PCM",
"last_odometer": 5000,
"acquisition_date": frappe.utils.nowdate(),
"location": "Mumbai",
"chassis_no": "1234ABCD",
"uom": "Litre",
"vehicle_value": frappe.utils.flt(500000)
})
vehicle.insert()
<|fim▁end|> | create_vehicle |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.urlresolvers import reverse
class Software(models.Model):
name = models.CharField(max_length=200)
def __unicode__(self):
return self.name<|fim▁hole|> def get_absolute_url(self):
return reverse('software_edit', kwargs={'pk': self.pk})<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.urlresolvers import reverse
class Software(models.Model):
<|fim_middle|>
<|fim▁end|> | name = models.CharField(max_length=200)
def __unicode__(self):
return self.name
def get_absolute_url(self):
return reverse('software_edit', kwargs={'pk': self.pk}) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.urlresolvers import reverse
class Software(models.Model):
name = models.CharField(max_length=200)
def __unicode__(self):
<|fim_middle|>
def get_absolute_url(self):
return reverse('software_edit', kwargs={'pk': self.pk})
<|fim▁end|> | return self.name |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.urlresolvers import reverse
class Software(models.Model):
name = models.CharField(max_length=200)
def __unicode__(self):
return self.name
def get_absolute_url(self):
<|fim_middle|>
<|fim▁end|> | return reverse('software_edit', kwargs={'pk': self.pk}) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.urlresolvers import reverse
class Software(models.Model):
name = models.CharField(max_length=200)
def <|fim_middle|>(self):
return self.name
def get_absolute_url(self):
return reverse('software_edit', kwargs={'pk': self.pk})
<|fim▁end|> | __unicode__ |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.core.urlresolvers import reverse
class Software(models.Model):
name = models.CharField(max_length=200)
def __unicode__(self):
return self.name
def <|fim_middle|>(self):
return reverse('software_edit', kwargs={'pk': self.pk})
<|fim▁end|> | get_absolute_url |
<|file_name|>Setup.py<|end_file_name|><|fim▁begin|>__author__ = 'Autio'<|fim▁hole|>import py2exe
setup(windows=['ShitCrimson.py'])<|fim▁end|> |
from distutils.core import setup |
<|file_name|>WMSOverlayServer.py<|end_file_name|><|fim▁begin|>from kvmap.code.projections import *
from urllib2 import urlopen
from httplib import HTTPConnection
from threading import Thread
from kivy.logger import Logger
from kivy.loader import Loader
from os.path import join, dirname
import time, os
import hashlib
try:
from pyproj import Proj
from xml.etree import ElementTree as ET
except:
pass
class WMSOverlayServer(object):
cache = {}
available_maptype = dict(roadmap='Roadmap') # default
type = "wms"
'''Generic WMS server'''
def __init__(self, progress_callback=None):
self.progress_callback = progress_callback
def setProgressCallback(self, progress_callback):
self.progress_callback = progress_callback
def getInfo(self, lat, lon, epsilon):
return None
def get(self, parent, width, height):
self.bl = parent.bottom_left
self.tr = parent.top_right
self.zoom = parent.zoom
url = self.geturl(self.bl[0], self.bl[1], self.tr[0], self.tr[1], self.zoom, width, height)
if not url:
return None
key = hashlib.md5(url).hexdigest()
if key in self.cache:
return self.cache[key]
try:
image = Loader.image('http://' + self.provider_host + url, progress_callback=self.progress_callback)
self.cache[key] = image
except Exception, e:
Logger.error('OverlayServer could not find (or read) image %s [%s]' % (url, e))
image = None
def getLegendGraphic(self):
if self.legend is None and not self.triedlegend:
self.triedlegend = True
layer = self.layer
if "," in layer:
layer = layer[layer.rindex(",") + 1:]
if self.legendlayer:
layer = self.legendlayer
url = self.baseurl + "?REQUEST=GetLegendGraphic&VERSION=1.0.0&FORMAT=image/png&LAYER=%s&ext=.png" % (layer)
try:
print 'http://' + self.provider_host + url
image = Loader.image('http://' + self.provider_host + url)
self.legend = image
except Exception, e:
Logger.error('OverlayServer could not find LEGENDGRAPHICS for %s %s' % (self.baseurl, layer))
return self.legend
def xy_to_co(self, lat, lon):
if self.customBounds:
x, y = latlon_to_custom(lat, lon, self.bounds)
elif self.isPLatLon: # patch for android - does not require pyproj library
x, y = lon, lat
elif self.isPGoogle: # patch for android - does not require pyproj library
x, y = latlon_to_google (lat, lon)
else:
x, y = transform(pLatlon, self.projection, lon, lat)
return x, y
def co_to_ll(self, x, y):
if self.customBounds:
u, v = custom_to_unit(lat, lon, self.bounds)
l, m = unit_to_latlon(u, v)
elif self.isPLatLon: # patch for android - does not require pyproj library
l, m = y, x
elif self.isPGoogle: # patch for android - does not require pyproj library
l, m = google_to_latlon (y, x)
else:
l, m = transform(self.projection, pLatlon, y, x)
return l, m
def geturl(self, lat1, lon1, lat2, lon2, zoom, w, h):
try:
x1, y1 = self.xy_to_co(lat1, lon1)
x2, y2 = self.xy_to_co(lat2, lon2)
return self.url + "&BBOX=%f,%f,%f,%f&WIDTH=%i&HEIGHT=%i&ext=.png" % (x1, y1, x2, y2, w, h)
except RuntimeError, e:
return None
def parseLayer(self, layer, data):
try:
name = layer.find("Name").text
except:
name = None
srss = layer.findall("SRS")
if name: # and srss:
data[name] = map(lambda x:x.text, srss)
if self.debug:
print "Provider %s provides layer %s in projections %s" % (self.provider_host, name, data[name])
subs = layer.findall("Layer")
for sub in subs:
self.parseLayer(sub, data)
def initFromGetCapabilities(self, host, baseurl, layer=None, index=0, srs=None):
self.debug = (layer == None) and (index == 0)
# GetCapabilities (Layers + SRS)
if layer is None or srs is None:
capabilities = urlopen(host + baseurl + "?SERVICE=WMS&VERSION=1.1.1&Request=GetCapabilities").read().strip()
try:
tree = ET.fromstring(capabilities)
if self.debug:
ET.dump(tree)
layers = tree.findall("Capability/Layer") # TODO: proper parsing of cascading layers and their SRS
data = {}
for l in layers:
self.parseLayer(l, data)
# Choose Layer and SRS by (alphabetical) index
if layer is None:
layer = sorted(data.keys())[index]
if srs is None:
srs = sorted(data[layer])[0]
except:
pass
print "Displaying from %s/%s: layer %s in SRS %s." % (host, baseurl, layer, srs)
# generate tile URL and init projection by EPSG code
self.layer = layer
self.baseurl = baseurl
self.url = baseurl + "?LAYERS=%s&SRS=%s&FORMAT=image/png&TRANSPARENT=TRUE&SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&STYLES=" % (layer, srs)<|fim▁hole|> self.isPLatLon = False
self.legend = None
self.legendlayer = None
self.triedlegend = False
if srs == "EPSG:4326":
self.isPLatLon = True
elif srs == "EPSG:900913" or srs == "EPSG:3857":
self.isPGoogle = True
try:
self.projection = pGoogle
except:
pass
else:
try:
self.projection = Proj(init=srs)
except:
pass<|fim▁end|> | self.isPGoogle = False |
<|file_name|>WMSOverlayServer.py<|end_file_name|><|fim▁begin|>from kvmap.code.projections import *
from urllib2 import urlopen
from httplib import HTTPConnection
from threading import Thread
from kivy.logger import Logger
from kivy.loader import Loader
from os.path import join, dirname
import time, os
import hashlib
try:
from pyproj import Proj
from xml.etree import ElementTree as ET
except:
pass
class WMSOverlayServer(object):
<|fim_middle|>
<|fim▁end|> | cache = {}
available_maptype = dict(roadmap='Roadmap') # default
type = "wms"
'''Generic WMS server'''
def __init__(self, progress_callback=None):
self.progress_callback = progress_callback
def setProgressCallback(self, progress_callback):
self.progress_callback = progress_callback
def getInfo(self, lat, lon, epsilon):
return None
def get(self, parent, width, height):
self.bl = parent.bottom_left
self.tr = parent.top_right
self.zoom = parent.zoom
url = self.geturl(self.bl[0], self.bl[1], self.tr[0], self.tr[1], self.zoom, width, height)
if not url:
return None
key = hashlib.md5(url).hexdigest()
if key in self.cache:
return self.cache[key]
try:
image = Loader.image('http://' + self.provider_host + url, progress_callback=self.progress_callback)
self.cache[key] = image
except Exception, e:
Logger.error('OverlayServer could not find (or read) image %s [%s]' % (url, e))
image = None
def getLegendGraphic(self):
if self.legend is None and not self.triedlegend:
self.triedlegend = True
layer = self.layer
if "," in layer:
layer = layer[layer.rindex(",") + 1:]
if self.legendlayer:
layer = self.legendlayer
url = self.baseurl + "?REQUEST=GetLegendGraphic&VERSION=1.0.0&FORMAT=image/png&LAYER=%s&ext=.png" % (layer)
try:
print 'http://' + self.provider_host + url
image = Loader.image('http://' + self.provider_host + url)
self.legend = image
except Exception, e:
Logger.error('OverlayServer could not find LEGENDGRAPHICS for %s %s' % (self.baseurl, layer))
return self.legend
def xy_to_co(self, lat, lon):
if self.customBounds:
x, y = latlon_to_custom(lat, lon, self.bounds)
elif self.isPLatLon: # patch for android - does not require pyproj library
x, y = lon, lat
elif self.isPGoogle: # patch for android - does not require pyproj library
x, y = latlon_to_google (lat, lon)
else:
x, y = transform(pLatlon, self.projection, lon, lat)
return x, y
def co_to_ll(self, x, y):
if self.customBounds:
u, v = custom_to_unit(lat, lon, self.bounds)
l, m = unit_to_latlon(u, v)
elif self.isPLatLon: # patch for android - does not require pyproj library
l, m = y, x
elif self.isPGoogle: # patch for android - does not require pyproj library
l, m = google_to_latlon (y, x)
else:
l, m = transform(self.projection, pLatlon, y, x)
return l, m
def geturl(self, lat1, lon1, lat2, lon2, zoom, w, h):
try:
x1, y1 = self.xy_to_co(lat1, lon1)
x2, y2 = self.xy_to_co(lat2, lon2)
return self.url + "&BBOX=%f,%f,%f,%f&WIDTH=%i&HEIGHT=%i&ext=.png" % (x1, y1, x2, y2, w, h)
except RuntimeError, e:
return None
def parseLayer(self, layer, data):
try:
name = layer.find("Name").text
except:
name = None
srss = layer.findall("SRS")
if name: # and srss:
data[name] = map(lambda x:x.text, srss)
if self.debug:
print "Provider %s provides layer %s in projections %s" % (self.provider_host, name, data[name])
subs = layer.findall("Layer")
for sub in subs:
self.parseLayer(sub, data)
def initFromGetCapabilities(self, host, baseurl, layer=None, index=0, srs=None):
self.debug = (layer == None) and (index == 0)
# GetCapabilities (Layers + SRS)
if layer is None or srs is None:
capabilities = urlopen(host + baseurl + "?SERVICE=WMS&VERSION=1.1.1&Request=GetCapabilities").read().strip()
try:
tree = ET.fromstring(capabilities)
if self.debug:
ET.dump(tree)
layers = tree.findall("Capability/Layer") # TODO: proper parsing of cascading layers and their SRS
data = {}
for l in layers:
self.parseLayer(l, data)
# Choose Layer and SRS by (alphabetical) index
if layer is None:
layer = sorted(data.keys())[index]
if srs is None:
srs = sorted(data[layer])[0]
except:
pass
print "Displaying from %s/%s: layer %s in SRS %s." % (host, baseurl, layer, srs)
# generate tile URL and init projection by EPSG code
self.layer = layer
self.baseurl = baseurl
self.url = baseurl + "?LAYERS=%s&SRS=%s&FORMAT=image/png&TRANSPARENT=TRUE&SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&STYLES=" % (layer, srs)
self.isPGoogle = False
self.isPLatLon = False
self.legend = None
self.legendlayer = None
self.triedlegend = False
if srs == "EPSG:4326":
self.isPLatLon = True
elif srs == "EPSG:900913" or srs == "EPSG:3857":
self.isPGoogle = True
try:
self.projection = pGoogle
except:
pass
else:
try:
self.projection = Proj(init=srs)
except:
pass |
<|file_name|>WMSOverlayServer.py<|end_file_name|><|fim▁begin|>from kvmap.code.projections import *
from urllib2 import urlopen
from httplib import HTTPConnection
from threading import Thread
from kivy.logger import Logger
from kivy.loader import Loader
from os.path import join, dirname
import time, os
import hashlib
try:
from pyproj import Proj
from xml.etree import ElementTree as ET
except:
pass
class WMSOverlayServer(object):
cache = {}
available_maptype = dict(roadmap='Roadmap') # default
type = "wms"
'''Generic WMS server'''
def __init__(self, progress_callback=None):
<|fim_middle|>
def setProgressCallback(self, progress_callback):
self.progress_callback = progress_callback
def getInfo(self, lat, lon, epsilon):
return None
def get(self, parent, width, height):
self.bl = parent.bottom_left
self.tr = parent.top_right
self.zoom = parent.zoom
url = self.geturl(self.bl[0], self.bl[1], self.tr[0], self.tr[1], self.zoom, width, height)
if not url:
return None
key = hashlib.md5(url).hexdigest()
if key in self.cache:
return self.cache[key]
try:
image = Loader.image('http://' + self.provider_host + url, progress_callback=self.progress_callback)
self.cache[key] = image
except Exception, e:
Logger.error('OverlayServer could not find (or read) image %s [%s]' % (url, e))
image = None
def getLegendGraphic(self):
if self.legend is None and not self.triedlegend:
self.triedlegend = True
layer = self.layer
if "," in layer:
layer = layer[layer.rindex(",") + 1:]
if self.legendlayer:
layer = self.legendlayer
url = self.baseurl + "?REQUEST=GetLegendGraphic&VERSION=1.0.0&FORMAT=image/png&LAYER=%s&ext=.png" % (layer)
try:
print 'http://' + self.provider_host + url
image = Loader.image('http://' + self.provider_host + url)
self.legend = image
except Exception, e:
Logger.error('OverlayServer could not find LEGENDGRAPHICS for %s %s' % (self.baseurl, layer))
return self.legend
def xy_to_co(self, lat, lon):
if self.customBounds:
x, y = latlon_to_custom(lat, lon, self.bounds)
elif self.isPLatLon: # patch for android - does not require pyproj library
x, y = lon, lat
elif self.isPGoogle: # patch for android - does not require pyproj library
x, y = latlon_to_google (lat, lon)
else:
x, y = transform(pLatlon, self.projection, lon, lat)
return x, y
def co_to_ll(self, x, y):
if self.customBounds:
u, v = custom_to_unit(lat, lon, self.bounds)
l, m = unit_to_latlon(u, v)
elif self.isPLatLon: # patch for android - does not require pyproj library
l, m = y, x
elif self.isPGoogle: # patch for android - does not require pyproj library
l, m = google_to_latlon (y, x)
else:
l, m = transform(self.projection, pLatlon, y, x)
return l, m
def geturl(self, lat1, lon1, lat2, lon2, zoom, w, h):
try:
x1, y1 = self.xy_to_co(lat1, lon1)
x2, y2 = self.xy_to_co(lat2, lon2)
return self.url + "&BBOX=%f,%f,%f,%f&WIDTH=%i&HEIGHT=%i&ext=.png" % (x1, y1, x2, y2, w, h)
except RuntimeError, e:
return None
def parseLayer(self, layer, data):
try:
name = layer.find("Name").text
except:
name = None
srss = layer.findall("SRS")
if name: # and srss:
data[name] = map(lambda x:x.text, srss)
if self.debug:
print "Provider %s provides layer %s in projections %s" % (self.provider_host, name, data[name])
subs = layer.findall("Layer")
for sub in subs:
self.parseLayer(sub, data)
def initFromGetCapabilities(self, host, baseurl, layer=None, index=0, srs=None):
self.debug = (layer == None) and (index == 0)
# GetCapabilities (Layers + SRS)
if layer is None or srs is None:
capabilities = urlopen(host + baseurl + "?SERVICE=WMS&VERSION=1.1.1&Request=GetCapabilities").read().strip()
try:
tree = ET.fromstring(capabilities)
if self.debug:
ET.dump(tree)
layers = tree.findall("Capability/Layer") # TODO: proper parsing of cascading layers and their SRS
data = {}
for l in layers:
self.parseLayer(l, data)
# Choose Layer and SRS by (alphabetical) index
if layer is None:
layer = sorted(data.keys())[index]
if srs is None:
srs = sorted(data[layer])[0]
except:
pass
print "Displaying from %s/%s: layer %s in SRS %s." % (host, baseurl, layer, srs)
# generate tile URL and init projection by EPSG code
self.layer = layer
self.baseurl = baseurl
self.url = baseurl + "?LAYERS=%s&SRS=%s&FORMAT=image/png&TRANSPARENT=TRUE&SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&STYLES=" % (layer, srs)
self.isPGoogle = False
self.isPLatLon = False
self.legend = None
self.legendlayer = None
self.triedlegend = False
if srs == "EPSG:4326":
self.isPLatLon = True
elif srs == "EPSG:900913" or srs == "EPSG:3857":
self.isPGoogle = True
try:
self.projection = pGoogle
except:
pass
else:
try:
self.projection = Proj(init=srs)
except:
pass
<|fim▁end|> | self.progress_callback = progress_callback |
<|file_name|>WMSOverlayServer.py<|end_file_name|><|fim▁begin|>from kvmap.code.projections import *
from urllib2 import urlopen
from httplib import HTTPConnection
from threading import Thread
from kivy.logger import Logger
from kivy.loader import Loader
from os.path import join, dirname
import time, os
import hashlib
try:
from pyproj import Proj
from xml.etree import ElementTree as ET
except:
pass
class WMSOverlayServer(object):
cache = {}
available_maptype = dict(roadmap='Roadmap') # default
type = "wms"
'''Generic WMS server'''
def __init__(self, progress_callback=None):
self.progress_callback = progress_callback
def setProgressCallback(self, progress_callback):
<|fim_middle|>
def getInfo(self, lat, lon, epsilon):
return None
def get(self, parent, width, height):
self.bl = parent.bottom_left
self.tr = parent.top_right
self.zoom = parent.zoom
url = self.geturl(self.bl[0], self.bl[1], self.tr[0], self.tr[1], self.zoom, width, height)
if not url:
return None
key = hashlib.md5(url).hexdigest()
if key in self.cache:
return self.cache[key]
try:
image = Loader.image('http://' + self.provider_host + url, progress_callback=self.progress_callback)
self.cache[key] = image
except Exception, e:
Logger.error('OverlayServer could not find (or read) image %s [%s]' % (url, e))
image = None
return image
def getLegendGraphic(self):
if self.legend is None and not self.triedlegend:
self.triedlegend = True
layer = self.layer
if "," in layer:
layer = layer[layer.rindex(",") + 1:]
if self.legendlayer:
layer = self.legendlayer
url = self.baseurl + "?REQUEST=GetLegendGraphic&VERSION=1.0.0&FORMAT=image/png&LAYER=%s&ext=.png" % (layer)
try:
print 'http://' + self.provider_host + url
image = Loader.image('http://' + self.provider_host + url)
self.legend = image
except Exception, e:
Logger.error('OverlayServer could not find LEGENDGRAPHICS for %s %s' % (self.baseurl, layer))
return self.legend
def xy_to_co(self, lat, lon):
if self.customBounds:
x, y = latlon_to_custom(lat, lon, self.bounds)
elif self.isPLatLon: # patch for android - does not require pyproj library
x, y = lon, lat
elif self.isPGoogle: # patch for android - does not require pyproj library
x, y = latlon_to_google (lat, lon)
else:
x, y = transform(pLatlon, self.projection, lon, lat)
return x, y
def co_to_ll(self, x, y):
if self.customBounds:
u, v = custom_to_unit(x, y, self.bounds)
l, m = unit_to_latlon(u, v)
elif self.isPLatLon: # patch for android - does not require pyproj library
l, m = y, x
elif self.isPGoogle: # patch for android - does not require pyproj library
l, m = google_to_latlon (y, x)
else:
l, m = transform(self.projection, pLatlon, y, x)
return l, m
def geturl(self, lat1, lon1, lat2, lon2, zoom, w, h):
try:
x1, y1 = self.xy_to_co(lat1, lon1)
x2, y2 = self.xy_to_co(lat2, lon2)
return self.url + "&BBOX=%f,%f,%f,%f&WIDTH=%i&HEIGHT=%i&ext=.png" % (x1, y1, x2, y2, w, h)
except RuntimeError, e:
return None
def parseLayer(self, layer, data):
try:
name = layer.find("Name").text
except:
name = None
srss = layer.findall("SRS")
if name: # and srss:
data[name] = map(lambda x:x.text, srss)
if self.debug:
print "Provider %s provides layer %s in projections %s" % (self.provider_host, name, data[name])
subs = layer.findall("Layer")
for sub in subs:
self.parseLayer(sub, data)
def initFromGetCapabilities(self, host, baseurl, layer=None, index=0, srs=None):
self.debug = (layer is None) and (index == 0)
# GetCapabilities (Layers + SRS)
if layer is None or srs is None:
capabilities = urlopen(host + baseurl + "?SERVICE=WMS&VERSION=1.1.1&Request=GetCapabilities").read().strip()
try:
tree = ET.fromstring(capabilities)
if self.debug:
ET.dump(tree)
layers = tree.findall("Capability/Layer") # TODO: proper parsing of cascading layers and their SRS
data = {}
for l in layers:
self.parseLayer(l, data)
# Choose Layer and SRS by (alphabetical) index
if layer is None:
layer = sorted(data.keys())[index]
if srs is None:
srs = sorted(data[layer])[0]
except:
pass
print "Displaying from %s/%s: layer %s in SRS %s." % (host, baseurl, layer, srs)
# generate tile URL and init projection by EPSG code
self.layer = layer
self.baseurl = baseurl
self.url = baseurl + "?LAYERS=%s&SRS=%s&FORMAT=image/png&TRANSPARENT=TRUE&SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&STYLES=" % (layer, srs)
self.isPGoogle = False
self.isPLatLon = False
self.legend = None
self.legendlayer = None
self.triedlegend = False
if srs == "EPSG:4326":
self.isPLatLon = True
elif srs == "EPSG:900913" or srs == "EPSG:3857":
self.isPGoogle = True
try:
self.projection = pGoogle
except:
pass
else:
try:
self.projection = Proj(init=srs)
except:
pass
<|fim▁end|> | self.progress_callback = progress_callback |