content
stringlengths 7
1.05M
| fixed_cases
stringlengths 1
1.28M
|
---|---|
# Program to find whether a given input string has balanced brackets or not.
def isBalanced(s):
    """Return "YES" if every bracket in *s* is properly matched and nested,
    otherwise "NO".

    Non-bracket characters are ignored.  Classic stack algorithm: push each
    opener, pop on a matching closer, fail on a mismatched or unexpected
    closer, and fail at the end if any opener was never closed.
    """
    # Maps each closing bracket to the opener it must match; replaces the
    # original's three near-identical if-blocks (one per closer type).
    pairs = {')': '(', ']': '[', '}': '{'}
    stack = []
    for ch in s:
        if ch in '([{':
            stack.append(ch)
        elif ch in pairs:
            # A closer with an empty stack, or one that does not match the
            # most recently pushed opener, makes the string unbalanced.
            if not stack or stack.pop() != pairs[ch]:
                return "NO"
    # Leftover openers mean some bracket was opened but never closed.
    return "YES" if not stack else "NO"
# Read a query string from the user and report whether its brackets balance.
# Sample input: {)[](}]}]}))}(())(
user_string = input('Enter your query string: ')
print(isBalanced(user_string))
|
def is_balanced(s):
    """Return 'YES' when all brackets in *s* are balanced, else 'NO'.

    Openers are pushed onto a stack; each closer must match the most
    recently pushed opener.  Characters other than brackets are ignored.
    """
    matching_opener = {'}': '{', ']': '[', ')': '('}
    stack = []
    for ch in s:
        if ch in ('{', '[', '('):
            stack.append(ch)
        elif ch in matching_opener:
            if not stack:
                return 'NO'
            if stack[-1] != matching_opener[ch]:
                # Mismatched closer: stop scanning; the non-empty stack
                # below yields the 'NO' verdict.
                break
            stack.pop()
    return 'YES' if not stack else 'NO'
# Prompt for a string and print the balance verdict.
query = input('Enter your query string: ')
print(is_balanced(query))
|
# Requests stress Pod resources for a given period of time to simulate load.
class Request:
    """A simulated load request aimed at a Kubernetes Deployment.

    Attributes:
        label: identifier of this request.
        deploymentLabel: the Deployment the request is being sent to.
        execTime: how long the request will use pod resources before
            completing (stored as ``int``; may arrive as a string).
    """

    # NOTE(review): the original header also described a 'cpuCost' field
    # (threads used per pod), but it is never stored here -- confirm whether
    # INFOLIST carries it and it was dropped intentionally.
    def __init__(self, INFOLIST):
        """Build a Request from a positional record.

        Args:
            INFOLIST: sequence shaped [label, deploymentLabel, execTime].
        """
        self.label = INFOLIST[0]
        self.deploymentLabel = INFOLIST[1]
        self.execTime = int(INFOLIST[2])

    def __repr__(self):
        # Debug-friendly representation; added for observability, does not
        # change the constructor contract.
        return 'Request(label={!r}, deploymentLabel={!r}, execTime={!r})'.format(
            self.label, self.deploymentLabel, self.execTime)
|
class Request:
    """Resource-stress request parsed from a positional record."""

    def __init__(self, INFOLIST):
        # The record is positional: [label, deploymentLabel, execTime].
        # Index explicitly (rather than unpack) so longer records are
        # tolerated exactly as before.
        label = INFOLIST[0]
        deployment_label = INFOLIST[1]
        exec_time = INFOLIST[2]
        self.label = label
        self.deploymentLabel = deployment_label
        self.execTime = int(exec_time)
|
"""
datos de entrada
sueldo--->s--->int
ventas realizadas departamento 1--->vrd1--->int
ventas realizadas departamento 2--->vrd2--->int
ventas realizadas departamento 3--->vrd3--->int
"""
#entradas
s=int(input("ingrese el sueldo base: "))
vrd1=int(input("ingrese el numero de ventas realizadas por departamento 1: "))
vrd2=int(input("ingrese el numero de ventas realizadas por departamento 2: "))
vrd3=int(input("ingrese el numero de ventas realizadas por departamento 3: "))
#caja negra
vt=(vrd1+vrd2+vrd3)
por=(vt*33/100)
if(vrd1>por):
s1=s+s*0.20
print("ventas mayor al 33%",s1)
else:
print("venta menor al 33%",s)
if (vrd2>por):
s2=s+s*0.20
print("ventas mayor al 33%",s2)
else:
print("venta menor al 33%",s)
if (vrd3>por):
s3=s+s*0.20
print("venta mayor a 33%",s3)
else:
print("venta menor al 33%",s)
|
"""
datos de entrada
sueldo--->s--->int
ventas realizadas departamento 1--->vrd1--->int
ventas realizadas departamento 2--->vrd2--->int
ventas realizadas departamento 3--->vrd3--->int
"""
s = int(input('ingrese el sueldo base: '))
vrd1 = int(input('ingrese el numero de ventas realizadas por departamento 1: '))
vrd2 = int(input('ingrese el numero de ventas realizadas por departamento 2: '))
vrd3 = int(input('ingrese el numero de ventas realizadas por departamento 3: '))
vt = vrd1 + vrd2 + vrd3
por = vt * 33 / 100
if vrd1 > por:
s1 = s + s * 0.2
print('ventas mayor al 33%', s1)
else:
print('venta menor al 33%', s)
if vrd2 > por:
s2 = s + s * 0.2
print('ventas mayor al 33%', s2)
else:
print('venta menor al 33%', s)
if vrd3 > por:
s3 = s + s * 0.2
print('venta mayor a 33%', s3)
else:
print('venta menor al 33%', s)
|
# dc_shell setup-TCL templates keyed by technology name.  Each template has
# three '%s' placeholders, filled in order with: the top-level design name,
# additional library search paths, and additional link libraries.  The
# string contents are Tcl, consumed by Synopsys Design Compiler.
dc_shell_setup_tcl = {
############
# default: technology-agnostic template (library lists left empty)
############
'default': """
set BRICK_RESULTS [getenv "BRICK_RESULTS"];
set TSMC_DIR [getenv "TSMC_DIR"];
set DESIGN_NAME "%s"; # The name of the top-level design.
#############################################################
# The following variables will be set automatically during
# 'icdc setup' -> 'commit setup' execution
# Manual changes could be made, but will be overwritten
# when 'icdc setup' is executed again
#
##############################################################
# START Auto Setup Section
# Additional search path to be added
# to the default search path
# The search paths belong to the following libraries:
# * core standard cell library
# * analog I/O standard cell library
# * digital I/O standard cell library
# * SRAM macro library
# * full custom macro libraries
set ADDITIONAL_SEARCH_PATHS [list \\
%s
]
# Target technology logical libraries
set TARGET_LIBRARY_FILES [list \\
]
# List of max min library pairs "max1 min1 max2 min2"
set MIN_LIBRARY_FILES [list \\
]
# END Auto Setup Section
##############################################################
# Extra link logical libraries
set ADDITIONAL_LINK_LIB_FILES [list \\
%s
]
##############################################################
# Topo Mode Settings
# no auto setup implemented so far
# please make necessary modification
#
set MW_REFERENCE_LIB_DIRS ""; # Milkyway reference libraries
set TECH_FILE ""; # Milkyway technology file
set MAP_FILE ""; # Mapping file for TLUplus
set TLUPLUS_MAX_FILE ""; # Max TLUplus file
set TLUPLUS_MIN_FILE ""; # Min TLUplus file
set MW_POWER_NET ""; #
set MW_POWER_PORT ""; #
set MW_GROUND_NET ""; #
set MW_GROUND_PORT ""; #
""",
#############
# TSMC65: template preloaded with TSMC 65nm LP library/Milkyway settings
#############
'tsmc65': """
set BRICK_RESULTS [getenv "BRICK_RESULTS"];
set TSMC_DIR [getenv "TSMC_DIR"];
set DESIGN_NAME "%s"; # The name of the top-level design.
#############################################################
# The following variables will be set automatically during
# 'icdc setup' -> 'commit setup' execution
# Manual changes could be made, but will be overwritten
# when 'icdc setup' is executed again
#
##############################################################
# START Auto Setup Section
# Additional search path to be added
# to the default search path
# The search paths belong to the following libraries:
# * core standard cell library
# * analog I/O standard cell library
# * digital I/O standard cell library
# * SRAM macro library
# * full custom macro libraries
set ADDITIONAL_SEARCH_PATHS [list \\
"$TSMC_DIR/digital/Front_End/timing_power_noise/NLDM/tcbn65lp_200a" \\
"$TSMC_DIR/digital/Front_End/timing_power_noise/NLDM/tpan65lpnv2_140b" \\
"$TSMC_DIR/digital/Front_End/timing_power_noise/NLDM/tpdn65lpnv2_140b" \\
"$TSMC_DIR/sram/tsdn65lpa4096x32m8f_200b/SYNOPSYS" \\
%s
]
# Target technology logical libraries
set TARGET_LIBRARY_FILES [list \\
"tcbn65lpwc.db" \\
"tcbn65lpwc0d90d9.db" \\
"tpan65lpnv2wc.db" \\
"tpdn65lpnv2wc.db" \\
"tsdn65lpa4096x32m8f_200b_tt1p2v40c.db" \\
]
# List of max min library pairs "max1 min1 max2 min2"
set MIN_LIBRARY_FILES [list \\
"tcbn65lpwc.db" "tcbn65lpbc.db" \\
"tpan65lpnv2wc.db" "tpan65lpnv2bc.db" \\
"tpdn65lpnv2wc.db" "tpdn65lpnv2bc.db" \\
]
# END Auto Setup Section
##############################################################
# Extra link logical libraries
set ADDITIONAL_LINK_LIB_FILES [list \\
%s
]
##############################################################
# Topo Mode Settings
# no auto setup implemented so far
# please make necessary modification
#
set MW_REFERENCE_LIB_DIRS "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/frame_only/tcbn65lp $TSMC_DIR/digital/Back_End/milkyway/tpdn65lpnv2_140b/mt_2/9lm/frame_only/tpdn65lpnv2 $TSMC_DIR/digital/Back_End/milkyway/tpan65lpnv2_140b/mt_2/9lm/frame_only/tpan65lpnv2"; # Milkyway reference libraries
set TECH_FILE "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/techfiles/tsmcn65_9lmT2.tf"; # Milkyway technology file
set MAP_FILE "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/techfiles/tluplus/star.map_9M"; # Mapping file for TLUplus
set TLUPLUS_MAX_FILE "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/techfiles/tluplus/cln65lp_1p09m+alrdl_rcworst_top2.tluplus"; # Max TLUplus file
set TLUPLUS_MIN_FILE "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/techfiles/tluplus/cln65lp_1p09m+alrdl_rcbest_top2.tluplus"; # Min TLUplus file
set MW_POWER_NET ""; #
set MW_POWER_PORT ""; #
set MW_GROUND_NET ""; #
set MW_GROUND_PORT ""; #
if {[shell_is_in_topographical_mode]} {
set_preferred_routing_direction -layer M1 -dir horizontal
set_preferred_routing_direction -layer M2 -dir vertical
set_preferred_routing_direction -layer M3 -dir horizontal
set_preferred_routing_direction -layer M4 -dir vertical
set_preferred_routing_direction -layer M5 -dir horizontal
set_preferred_routing_direction -layer M6 -dir vertical
set_preferred_routing_direction -layer M7 -dir horizontal
set_preferred_routing_direction -layer M8 -dir vertical
set_preferred_routing_direction -layer M9 -dir horizontal
set_preferred_routing_direction -layer AP -dir vertical
}
"""
}
dc_shell_main_tcl = """
#
# dc_shell script based on DC-ICPRO
#
# dc_shell_setup.tcl
source %s
# sourcelist
# the following file includes all RTL-Sources as ordered lists
source %s
#########################################################################
# Setup Variables
#########################################################################
#set alib_library_analysis_path $ICPRO_DIR/tmp/dc_shell ; # Point to a central cache of analyzed libraries
set clock_gating_enabled 1
# make design read-in a bit more verbose...
#set hdlin_keep_signal_name user
set hdlin_report_floating_net_to_ground true
# Enables shortening of names as the concatenation of interface
# signals results in names > 1000s of characters
set hdlin_shorten_long_module_name true
# Specify minimum number of characters. Default: 256
set hdlin_module_name_limit 100
set DCT_IGNORED_ROUTING_LAYERS "" ; # Enter the same ignored routing
# layers as P&R
set REPORTS_DIR "$BRICK_RESULTS/dc_shell_$DESIGN_NAME/reports"
set RESULTS_DIR "$BRICK_RESULTS/dc_shell_$DESIGN_NAME/results"
set tool "dc"
set target_library $TARGET_LIBRARY_FILES
set synthetic_library dw_foundation.sldb
set link_library "* $target_library $ADDITIONAL_LINK_LIB_FILES $synthetic_library"
set search_path [concat $search_path $ADDITIONAL_SEARCH_PATHS]
# add default icpro search path for global verilog sources
set user_search_path %s
set search_path [concat $search_path $user_search_path]
# Set min libraries if they exist
foreach {max_library min_library} $MIN_LIBRARY_FILES {
set_min_library $max_library -min_version $min_library }
if {[shell_is_in_topographical_mode]} {
set mw_logic1_net $MW_POWER_NET
set mw_logic0_net $MW_GROUND_NET
set mw_reference_library $MW_REFERENCE_LIB_DIRS
set mw_design_library ${DESIGN_NAME}_LIB
set mw_site_name_mapping [list CORE unit Core unit core unit]
create_mw_lib -technology $TECH_FILE \
-mw_reference_library $mw_reference_library \
$mw_design_library
open_mw_lib $mw_design_library
set_tlu_plus_files -max_tluplus $TLUPLUS_MAX_FILE \
-min_tluplus $TLUPLUS_MIN_FILE \
-tech2itf_map $MAP_FILE
check_tlu_plus_files
check_library
}
## set multicore usage
set_host_options -max_cores %s
echo "Information: Starting Synopsys Design Compiler synthesis run ... "
echo "Information: Filtered command line output. For details see 'logfiles/compile.log'! "
#################################################################################
# Setup for Formality verification
#################################################################################
set_svf $RESULTS_DIR/${DESIGN_NAME}.svf
#################################################################################
# Read in the RTL Design
#
# Read in the RTL source files or read in the elaborated design (DDC).
#################################################################################
define_design_lib WORK -path ./worklib
if { [llength $verilog_source_list] } {
echo "Information: Analyzing Verilog sources ... "
analyze -format verilog $verilog_source_list
}
if { [llength $vhdl_source_list] } {
echo "Information: Analyzing VHDL sources ... "
analyze -format vhdl $vhdl_source_list
}
if { [llength $systemverilog_source_list] } {
echo "Information: Analyzing SystemVerilog sources ... "
analyze -format sverilog $systemverilog_source_list
}
echo "Information: Elaborating top-level '$DESIGN_NAME' ... "
elaborate $DESIGN_NAME
write -format ddc -hierarchy -output $RESULTS_DIR/${DESIGN_NAME}.elab.ddc
list_designs -show_file > $REPORTS_DIR/$DESIGN_NAME.elab.list_designs
report_reference -hier > $REPORTS_DIR/$DESIGN_NAME.elab.report_reference
echo "Information: Linking design ... "
link > $REPORTS_DIR/$DESIGN_NAME.link
############################################################################
# Apply Logical Design Constraints
############################################################################
echo "Information: Reading design constraints ... "
set constraints_file %s
if {$constraints_file != 0} {
source -echo -verbose ${constraints_file}
}
# Enable area optimization in all flows
set_max_area 0
############################################################################
# Create Default Path Groups
# Remove these path group settings if user path groups already defined
############################################################################
set ports_clock_root [get_ports [all_fanout -flat -clock_tree -level 0]]
group_path -name REGOUT -to [all_outputs]
group_path -name REGIN -from [remove_from_collection [all_inputs] $ports_clock_root]
group_path -name FEEDTHROUGH -from [remove_from_collection [all_inputs] $ports_clock_root] -to [all_outputs]
#################################################################################
# Power Optimization Section
#################################################################################
if ($clock_gating_enabled) {
set_clock_gating_style \
-positive_edge_logic integrated \
-negative_edge_logic integrated \
-control_point before \
-minimum_bitwidth 4 \
-max_fanout 8
}
#############################################################################
# Apply Power Optimization Constraints
#############################################################################
# Include a SAIF file, if possible, for power optimization
# read_saif -auto_map_names -input ${DESIGN_NAME}.saif -instance < DESIGN_INSTANCE > -verbose
if {[shell_is_in_topographical_mode]} {
# Enable power prediction for this DC-T session using clock tree estimation.
set_power_prediction true
}
# set_max_leakage_power 0
# set_max_dynamic_power 0
set_max_total_power 0
if {[shell_is_in_topographical_mode]} {
# Specify ignored layers for routing to improve correlation
# Use the same ignored layers that will be used during place and route
if { $DCT_IGNORED_ROUTING_LAYERS != ""} {
set_ignored_layers $DCT_IGNORED_ROUTING_LAYERS
}
report_ignored_layers
# Apply Physical Design Constraints
# set_fuzzy_query_options -hierarchical_separators {/ _ .} \
# -bus_name_notations {[] __ ()} \
# -class {cell pin port net} \
# -show
#extract_physical_constraints $ICPRO_DIR/units/top/export/encounter/$DESIGN_NAME.def
extract_physical_constraints ./$DESIGN_NAME.def
# OR
# source -echo -verbose ${DESIGN_NAME}.physical_constraints.tcl
}
#
# check design
#
echo "Information: Checking design (see '$REPORTS_DIR/$DESIGN_NAME.check_design'). "
check_design > $REPORTS_DIR/$DESIGN_NAME.check_design
#########################################################
# Apply Additional Optimization Constraints
#########################################################
# Prevent assignment statements in the Verilog netlist.
set verilogout_no_tri true
# Uniquify design
uniquify -dont_skip_empty_designs
#########################################################
# Compile the Design
#
# Recommended Options:
#
# -scan
# -retime
# -timing_high_effort_script
# -area_high_effort_script
#
#########################################################
echo "Information: Starting top down compilation (compile_ultra) ... "
remove_unconnected_ports [find cell -hierarchy *]
#
# set to true to enable
# enable scan insertion during compilation
#
if { %s } {
# compile design using scan ffs
compile_ultra -scan
#
# modify insert_scan_script template for your DFT requirements
#
set insert_scan_script "./scripts/${DESIGN_NAME}.insert_scan.tcl"
if { ! [file exists $insert_scan_script] } {
echo "ERROR: Insert scan script '$insert_scan_script' not found. "
exit 1
} else {
source $insert_scan_script
}
} else {
# compilation without scan insertion
# added option to keep hierarchy
compile_ultra %s
}
echo "Information: Finished top down compilation. "
#################################################################################
# Write Out Final Design
#################################################################################
remove_unconnected_ports [find cell -hierarchy *]
change_names -rules verilog -hierarchy
echo "Information: Writing results to '$RESULTS_DIR' ... "
write -format ddc -hierarchy -output $RESULTS_DIR/${DESIGN_NAME}.ddc
write -f verilog -hier -output $RESULTS_DIR/${DESIGN_NAME}.v
if {[shell_is_in_topographical_mode]} {
# write_milkyway uses: mw_logic1_net, mw_logic0_net and mw_design_library variables from dc_setup.tcl
#write_milkyway -overwrite -output ${DESIGN_NAME}_DCT
write_physical_constraints -output ${RESULTS_DIR}/${DESIGN_NAME}.mapped.physical_constraints.tcl
# Do not write out net RC info into SDC
set write_sdc_output_lumped_net_capacitance false
set write_sdc_output_net_resistance false
}
# Write SDF backannotation data
write_sdf $RESULTS_DIR/${DESIGN_NAME}.sdf
write_sdc -nosplit $RESULTS_DIR/${DESIGN_NAME}.sdc
echo "Information: Writing reports to '$REPORTS_DIR' ... "
#
# check timing/contraints
#
report_design > $REPORTS_DIR/$DESIGN_NAME.report_design
check_timing > $REPORTS_DIR/$DESIGN_NAME.check_timing
report_port > $REPORTS_DIR/$DESIGN_NAME.report_port
report_timing_requirements > $REPORTS_DIR/$DESIGN_NAME.report_timing_requirements
report_clock > $REPORTS_DIR/$DESIGN_NAME.report_clock
report_constraint > $REPORTS_DIR/$DESIGN_NAME.report_constraint
set timing_bidirectional_pin_max_transition_checks "driver"
report_constraint -max_transition -all_vio >> $REPORTS_DIR/$DESIGN_NAME.report_constraint
set timing_bidirectional_pin_max_transition_checks "load"
report_constraint -max_transition -all_vio >> $REPORTS_DIR/$DESIGN_NAME.report_constraint
report_constraints -all_violators > ${REPORTS_DIR}/${DESIGN_NAME}.report_constraints_all_violators
#
# report design
#
report_timing -max_paths 10 > $REPORTS_DIR/$DESIGN_NAME.report_timing
report_area > $REPORTS_DIR/$DESIGN_NAME.report_area
report_power > $REPORTS_DIR/$DESIGN_NAME.report_power
report_fsm > $REPORTS_DIR/$DESIGN_NAME.report_fsm
exit
"""
|
dc_shell_setup_tcl = {'default': '\nset BRICK_RESULTS\t\t\t\t[getenv "BRICK_RESULTS"]; \nset TSMC_DIR [getenv "TSMC_DIR"]; \n\nset DESIGN_NAME "%s"; # The name of the top-level design.\n\n#############################################################\n# The following variables will be set automatically during\n# \'icdc setup\' -> \'commit setup\' execution\n# Manual changes could be made, but will be overwritten \n# when \'icdc setup\' is executed again\n#\n##############################################################\n# START Auto Setup Section\n\n# Additional search path to be added\n# to the default search path\n# The search paths belong to the following libraries:\n# * core standard cell library\n# * analog I/O standard cell library\n# * digital I/O standard cell library\n# * SRAM macro library\n# * full custom macro libraries\nset ADDITIONAL_SEARCH_PATHS\t[list \\\n %s\n]\n\n# Target technology logical libraries \t\t\t\t\t\t\t\nset TARGET_LIBRARY_FILES\t[list \\\n]\n\n# List of max min library pairs "max1 min1 max2 min2"\nset MIN_LIBRARY_FILES\t[list \\\n]\n\n# END Auto Setup Section \n##############################################################\n\n# Extra link logical libraries\nset ADDITIONAL_LINK_LIB_FILES [list \\\n %s\n]\n\n\n##############################################################\n# Topo Mode Settings\n# no auto setup implemented so far\n# please make necessary modification\n#\nset MW_REFERENCE_LIB_DIRS ""; # Milkyway reference libraries\nset TECH_FILE ""; \t\t\t# Milkyway technology file\nset MAP_FILE ""; \t\t\t# Mapping file for TLUplus\nset TLUPLUS_MAX_FILE ""; \t# Max TLUplus file\nset TLUPLUS_MIN_FILE ""; \t# Min TLUplus file\nset MW_POWER_NET ""; \t\t#\nset MW_POWER_PORT ""; \t #\nset MW_GROUND_NET ""; \t #\nset MW_GROUND_PORT ""; \t #\n', 'tsmc65': '\nset BRICK_RESULTS\t\t\t\t[getenv "BRICK_RESULTS"]; \nset TSMC_DIR [getenv "TSMC_DIR"]; \n\nset DESIGN_NAME "%s"; # The name of the top-level 
design.\n\n#############################################################\n# The following variables will be set automatically during\n# \'icdc setup\' -> \'commit setup\' execution\n# Manual changes could be made, but will be overwritten \n# when \'icdc setup\' is executed again\n#\n##############################################################\n# START Auto Setup Section\n\n# Additional search path to be added\n# to the default search path\n# The search paths belong to the following libraries:\n# * core standard cell library\n# * analog I/O standard cell library\n# * digital I/O standard cell library\n# * SRAM macro library\n# * full custom macro libraries\nset ADDITIONAL_SEARCH_PATHS\t[list \\\n "$TSMC_DIR/digital/Front_End/timing_power_noise/NLDM/tcbn65lp_200a" \\\n "$TSMC_DIR/digital/Front_End/timing_power_noise/NLDM/tpan65lpnv2_140b" \\\n "$TSMC_DIR/digital/Front_End/timing_power_noise/NLDM/tpdn65lpnv2_140b" \\\n "$TSMC_DIR/sram/tsdn65lpa4096x32m8f_200b/SYNOPSYS" \\\n %s\n]\n\n# Target technology logical libraries \t\t\t\t\t\t\t\nset TARGET_LIBRARY_FILES\t[list \\\n "tcbn65lpwc.db" \\\n "tcbn65lpwc0d90d9.db" \\\n "tpan65lpnv2wc.db" \\\n "tpdn65lpnv2wc.db" \\\n "tsdn65lpa4096x32m8f_200b_tt1p2v40c.db" \\\n]\n\n# List of max min library pairs "max1 min1 max2 min2"\nset MIN_LIBRARY_FILES\t[list \\\n "tcbn65lpwc.db" "tcbn65lpbc.db" \\\n "tpan65lpnv2wc.db" "tpan65lpnv2bc.db" \\\n "tpdn65lpnv2wc.db" "tpdn65lpnv2bc.db" \\\n]\n\n# END Auto Setup Section \n##############################################################\n\n# Extra link logical libraries\nset ADDITIONAL_LINK_LIB_FILES [list \\\n %s\n]\n\n\n##############################################################\n# Topo Mode Settings\n# no auto setup implemented so far\n# please make necessary modification\n#\nset MW_REFERENCE_LIB_DIRS "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/frame_only/tcbn65lp $TSMC_DIR/digital/Back_End/milkyway/tpdn65lpnv2_140b/mt_2/9lm/frame_only/tpdn65lpnv2 
$TSMC_DIR/digital/Back_End/milkyway/tpan65lpnv2_140b/mt_2/9lm/frame_only/tpan65lpnv2"; # Milkyway reference libraries\nset TECH_FILE "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/techfiles/tsmcn65_9lmT2.tf"; \t\t\t# Milkyway technology file\nset MAP_FILE "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/techfiles/tluplus/star.map_9M"; \t\t\t# Mapping file for TLUplus\nset TLUPLUS_MAX_FILE "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/techfiles/tluplus/cln65lp_1p09m+alrdl_rcworst_top2.tluplus"; \t# Max TLUplus file\nset TLUPLUS_MIN_FILE "$TSMC_DIR/digital/Back_End/milkyway/tcbn65lp_200a/techfiles/tluplus/cln65lp_1p09m+alrdl_rcbest_top2.tluplus"; \t# Min TLUplus file\nset MW_POWER_NET ""; \t\t#\nset MW_POWER_PORT ""; \t #\nset MW_GROUND_NET ""; \t #\nset MW_GROUND_PORT ""; \t #\n\n\nif {[shell_is_in_topographical_mode]} {\n\tset_preferred_routing_direction -layer M1 -dir horizontal\n\tset_preferred_routing_direction -layer M2 -dir vertical\n\tset_preferred_routing_direction -layer M3 -dir horizontal\n\tset_preferred_routing_direction -layer M4 -dir vertical\n\tset_preferred_routing_direction -layer M5 -dir horizontal\n\tset_preferred_routing_direction -layer M6 -dir vertical\n\tset_preferred_routing_direction -layer M7 -dir horizontal\n\tset_preferred_routing_direction -layer M8 -dir vertical\n\tset_preferred_routing_direction -layer M9 -dir horizontal\n\tset_preferred_routing_direction -layer AP -dir vertical\n}\n'}
dc_shell_main_tcl = '\n#\n# dc_shell script based on DC-ICPRO\n#\n\n# dc_shell_setup.tcl\nsource %s\n# sourcelist\n# the following file includes all RTL-Sources as ordered lists\nsource %s\n#########################################################################\n# Setup Variables\n#########################################################################\n#set alib_library_analysis_path $ICPRO_DIR/tmp/dc_shell ; # Point to a central cache of analyzed libraries\n\nset clock_gating_enabled 1\n\n# make design read-in a bit more verbose...\n#set hdlin_keep_signal_name user\nset hdlin_report_floating_net_to_ground true\n\n# Enables shortening of names as the concatenation of interface\n# signals results in names > 1000s of characters\nset hdlin_shorten_long_module_name true\n# Specify minimum number of characters. Default: 256\nset hdlin_module_name_limit 100\n\nset DCT_IGNORED_ROUTING_LAYERS "" ; # Enter the same ignored routing\n\t\t\t\t\t\t\t\t\t\t # layers as P&R\nset REPORTS_DIR "$BRICK_RESULTS/dc_shell_$DESIGN_NAME/reports"\nset RESULTS_DIR "$BRICK_RESULTS/dc_shell_$DESIGN_NAME/results"\nset tool "dc"\n\nset target_library $TARGET_LIBRARY_FILES\nset synthetic_library dw_foundation.sldb\nset link_library "* $target_library $ADDITIONAL_LINK_LIB_FILES $synthetic_library"\n\nset search_path [concat $search_path $ADDITIONAL_SEARCH_PATHS]\n# add default icpro search path for global verilog sources\nset user_search_path %s\nset search_path [concat $search_path $user_search_path]\n\n# Set min libraries if they exist\nforeach {max_library min_library} $MIN_LIBRARY_FILES {\nset_min_library $max_library -min_version $min_library }\n\nif {[shell_is_in_topographical_mode]} {\n set mw_logic1_net $MW_POWER_NET\n set mw_logic0_net $MW_GROUND_NET\n set mw_reference_library $MW_REFERENCE_LIB_DIRS\n set mw_design_library ${DESIGN_NAME}_LIB\n set mw_site_name_mapping [list CORE unit Core unit core unit]\n create_mw_lib -technology $TECH_FILE -mw_reference_library 
$mw_reference_library $mw_design_library\n open_mw_lib $mw_design_library\n set_tlu_plus_files -max_tluplus $TLUPLUS_MAX_FILE \t\t\t\t\t\t -min_tluplus $TLUPLUS_MIN_FILE \t\t\t\t\t\t -tech2itf_map $MAP_FILE\n check_tlu_plus_files\n\n check_library\n}\n\n\n## set multicore usage\nset_host_options -max_cores %s\n\n\necho "Information: Starting Synopsys Design Compiler synthesis run ... "\necho "Information: Filtered command line output. For details see \'logfiles/compile.log\'! "\n\n#################################################################################\n# Setup for Formality verification\n#################################################################################\nset_svf $RESULTS_DIR/${DESIGN_NAME}.svf\n\n\n#################################################################################\n# Read in the RTL Design\n#\n# Read in the RTL source files or read in the elaborated design (DDC).\n#################################################################################\ndefine_design_lib WORK -path ./worklib\n\nif { [llength $verilog_source_list] } {\n echo "Information: Analyzing Verilog sources ... "\n analyze -format verilog $verilog_source_list\n}\n\nif { [llength $vhdl_source_list] } {\n echo "Information: Analyzing VHDL sources ... "\n analyze -format vhdl $vhdl_source_list\n}\n\nif { [llength $systemverilog_source_list] } {\n echo "Information: Analyzing SystemVerilog sources ... "\n analyze -format sverilog $systemverilog_source_list\n}\n\necho "Information: Elaborating top-level \'$DESIGN_NAME\' ... "\nelaborate $DESIGN_NAME\n\n\nwrite -format ddc -hierarchy -output $RESULTS_DIR/${DESIGN_NAME}.elab.ddc\n\nlist_designs -show_file > $REPORTS_DIR/$DESIGN_NAME.elab.list_designs\nreport_reference -hier > $REPORTS_DIR/$DESIGN_NAME.elab.report_reference\n\n\necho "Information: Linking design ... 
"\nlink > $REPORTS_DIR/$DESIGN_NAME.link\n\n\n############################################################################\n# Apply Logical Design Constraints\n############################################################################\necho "Information: Reading design constraints ... "\nset constraints_file %s\nif {$constraints_file != 0} {\n source -echo -verbose ${constraints_file}\n}\n\n# Enable area optimization in all flows\nset_max_area 0\n\n############################################################################\n# Create Default Path Groups\n# Remove these path group settings if user path groups already defined\n############################################################################\nset ports_clock_root [get_ports [all_fanout -flat -clock_tree -level 0]]\ngroup_path -name REGOUT -to [all_outputs]\ngroup_path -name REGIN -from [remove_from_collection [all_inputs] $ports_clock_root]\ngroup_path -name FEEDTHROUGH -from [remove_from_collection [all_inputs] $ports_clock_root] -to [all_outputs]\n\n#################################################################################\n# Power Optimization Section\n#################################################################################\n\nif ($clock_gating_enabled) {\n\tset_clock_gating_style \t\t-positive_edge_logic integrated \t\t-negative_edge_logic integrated \t\t-control_point before \t\t-minimum_bitwidth 4 \t\t-max_fanout 8\n}\n\n#############################################################################\n# Apply Power Optimization Constraints\n#############################################################################\n# Include a SAIF file, if possible, for power optimization\n# read_saif -auto_map_names -input ${DESIGN_NAME}.saif -instance < DESIGN_INSTANCE > -verbose\nif {[shell_is_in_topographical_mode]} {\n # Enable power prediction for this DC-T session using clock tree estimation.\n set_power_prediction true\n}\n\n# set_max_leakage_power 0\n# set_max_dynamic_power 
0\nset_max_total_power 0\n\nif {[shell_is_in_topographical_mode]} {\n # Specify ignored layers for routing to improve correlation\n # Use the same ignored layers that will be used during place and route\n if { $DCT_IGNORED_ROUTING_LAYERS != ""} {\n set_ignored_layers $DCT_IGNORED_ROUTING_LAYERS\n }\n report_ignored_layers\n\n # Apply Physical Design Constraints\n # set_fuzzy_query_options -hierarchical_separators {/ _ .} # -bus_name_notations {[] __ ()} # -class {cell pin port net} # -show\n #extract_physical_constraints $ICPRO_DIR/units/top/export/encounter/$DESIGN_NAME.def\n extract_physical_constraints ./$DESIGN_NAME.def\n # OR\n # source -echo -verbose ${DESIGN_NAME}.physical_constraints.tcl\n}\n\n#\n# check design\n#\necho "Information: Checking design (see \'$REPORTS_DIR/$DESIGN_NAME.check_design\'). "\n\ncheck_design > $REPORTS_DIR/$DESIGN_NAME.check_design\n\n\n#########################################################\n# Apply Additional Optimization Constraints\n#########################################################\n\n# Prevent assignment statements in the Verilog netlist.\nset verilogout_no_tri true\n\n# Uniquify design\nuniquify -dont_skip_empty_designs\n\n#########################################################\n# Compile the Design\n#\n# Recommended Options:\n#\n# -scan\n# -retime\n# -timing_high_effort_script\n# -area_high_effort_script\n#\n#########################################################\necho "Information: Starting top down compilation (compile_ultra) ... "\nremove_unconnected_ports [find cell -hierarchy *]\n\n#\n# set to true to enable\n# enable scan insertion during compilation\n#\nif { %s } {\n # compile design using scan ffs\n compile_ultra -scan\n\n #\n # modify insert_scan_script template for your DFT requirements\n #\n set insert_scan_script "./scripts/${DESIGN_NAME}.insert_scan.tcl"\n if { ! [file exists $insert_scan_script] } {\n echo "ERROR: Insert scan script \'$insert_scan_script\' not found. 
"\n exit 1\n } else {\n source $insert_scan_script\n }\n} else {\n # compilation without scan insertion\n\t# added option to keep hierarchy\n\n compile_ultra %s\n}\n\necho "Information: Finished top down compilation. "\n\n#################################################################################\n# Write Out Final Design\n#################################################################################\nremove_unconnected_ports [find cell -hierarchy *]\nchange_names -rules verilog -hierarchy\n\necho "Information: Writing results to \'$RESULTS_DIR\' ... "\nwrite -format ddc -hierarchy -output $RESULTS_DIR/${DESIGN_NAME}.ddc\nwrite -f verilog -hier -output $RESULTS_DIR/${DESIGN_NAME}.v\n\nif {[shell_is_in_topographical_mode]} {\n\t# write_milkyway uses: mw_logic1_net, mw_logic0_net and mw_design_library variables from dc_setup.tcl\n\t#write_milkyway -overwrite -output ${DESIGN_NAME}_DCT\n\n\twrite_physical_constraints -output ${RESULTS_DIR}/${DESIGN_NAME}.mapped.physical_constraints.tcl\n\n\t# Do not write out net RC info into SDC\n\tset write_sdc_output_lumped_net_capacitance false\n\tset write_sdc_output_net_resistance false\n}\n\n# Write SDF backannotation data\nwrite_sdf $RESULTS_DIR/${DESIGN_NAME}.sdf\nwrite_sdc -nosplit $RESULTS_DIR/${DESIGN_NAME}.sdc\n\necho "Information: Writing reports to \'$REPORTS_DIR\' ... 
"\n#\n# check timing/contraints\n#\nreport_design > $REPORTS_DIR/$DESIGN_NAME.report_design\ncheck_timing > $REPORTS_DIR/$DESIGN_NAME.check_timing\nreport_port > $REPORTS_DIR/$DESIGN_NAME.report_port\nreport_timing_requirements > $REPORTS_DIR/$DESIGN_NAME.report_timing_requirements\nreport_clock > $REPORTS_DIR/$DESIGN_NAME.report_clock\nreport_constraint > $REPORTS_DIR/$DESIGN_NAME.report_constraint\n\nset timing_bidirectional_pin_max_transition_checks "driver"\nreport_constraint -max_transition -all_vio >> $REPORTS_DIR/$DESIGN_NAME.report_constraint\n\nset timing_bidirectional_pin_max_transition_checks "load"\nreport_constraint -max_transition -all_vio >> $REPORTS_DIR/$DESIGN_NAME.report_constraint\n\nreport_constraints -all_violators > ${REPORTS_DIR}/${DESIGN_NAME}.report_constraints_all_violators\n\n#\n# report design\n#\nreport_timing -max_paths 10 > $REPORTS_DIR/$DESIGN_NAME.report_timing\nreport_area > $REPORTS_DIR/$DESIGN_NAME.report_area\nreport_power > $REPORTS_DIR/$DESIGN_NAME.report_power\nreport_fsm > $REPORTS_DIR/$DESIGN_NAME.report_fsm\n\nexit\n'
|
# Constants and helpers for ingesting Marsaglia's random-bit files.
FILE_BASE = '/media/alxfed/toca/bits.'
FILE_NUMBER_MIN = 1
FILE_NUMBER_MAX = 60

# Two-digit, zero-padded extensions '01' .. '60'.  Index 0 holds the
# extension of file number 1, hence the n - 1 lookup in file_name().
FILE_EXTENSION = [str(i).zfill(2) for i in range(FILE_NUMBER_MIN, FILE_NUMBER_MAX + 1)]


def file_name(n=1):
    """Return the full path of Marsaglia bits file number *n* (1..60)."""
    if n not in range(FILE_NUMBER_MIN, FILE_NUMBER_MAX + 1):
        raise ValueError('There is no such file in Marsaglia set of bits')
    return FILE_BASE + FILE_EXTENSION[n - 1]
|
# Snake_case variant of the Marsaglia constants; kept self-consistent below.
file_base = '/media/alxfed/toca/bits.'
file_number_min = 1
file_number_max = 60
# Two-digit zero-padded extensions: '01' .. '60'.
file_extension = [str(i).zfill(2) for i in range(file_number_min, file_number_max + 1)]

def file_name(n=1):
    """Return the path of the n-th Marsaglia bits file (1-based)."""
    if file_number_min <= n <= file_number_max:
        return file_base + file_extension[n - 1]
    # Bug fix: the original raised undefined `value_error`; use the builtin.
    raise ValueError('There is no such file in Marsaglia set of bits')
|
# Environment parameters common to all sonar models.
ENV_PARAMS = ("temperature", "salinity", "pressure", "sound_speed", "sound_absorption")
# Calibration parameters per instrument family (EK echosounders vs AZFP).
CAL_PARAMS = {
    "EK": ("sa_correction", "gain_correction", "equivalent_beam_angle"),
    "AZFP": ("EL", "DS", "TVR", "VTX", "equivalent_beam_angle", "Sv_offset"),
}
class CalibrateBase:
    """Class to handle calibration for all sonar models.

    Subclasses fill in env/cal parameters and the actual calibration math;
    this base class only fixes the common interface.
    """

    def __init__(self, echodata):
        self.echodata = echodata
        # Populated by the child class' get_env_params / get_cal_params.
        self.env_params = None
        self.cal_params = None
        # Computed in compute_Sv/compute_Sp by the child class.
        self.range_meter = None

    def get_env_params(self, **kwargs):
        pass

    def get_cal_params(self, **kwargs):
        pass

    def compute_range_meter(self, **kwargs):
        """Calculate range in units meter.

        Returns
        -------
        range_meter : xr.DataArray
            range in units meter
        """
        pass

    def _cal_power(self, cal_type, **kwargs):
        """Calibrate power data for EK60, EK80, and AZFP.

        Parameters
        ----------
        cal_type : str
            'Sv' for calculating volume backscattering strength, or
            'Sp' for calculating point backscattering strength
        """
        pass

    def compute_Sv(self, **kwargs):
        pass

    def compute_Sp(self, **kwargs):
        pass

    def _add_params_to_output(self, ds_out):
        """Add all cal and env parameters to output Sv dataset."""
        # Copy both parameter groups into the dataset with a single loop.
        for params in (self.env_params, self.cal_params):
            for name, value in params.items():
                ds_out[name] = value
        return ds_out
|
# Environment parameters common to all sonar models.
env_params = ('temperature', 'salinity', 'pressure', 'sound_speed', 'sound_absorption')
# Calibration parameters per instrument family (EK echosounders vs AZFP).
cal_params = {'EK': ('sa_correction', 'gain_correction', 'equivalent_beam_angle'), 'AZFP': ('EL', 'DS', 'TVR', 'VTX', 'equivalent_beam_angle', 'Sv_offset')}
class Calibratebase:
    """Class to handle calibration for all sonar models.

    Abstract interface: subclasses supply parameters and calibration math.
    """

    def __init__(self, echodata):
        self.echodata = echodata
        # Populated by the child class' get_env_params / get_cal_params.
        self.env_params = None
        self.cal_params = None
        # Computed in compute__sv/compute__sp by the child class.
        self.range_meter = None

    def get_env_params(self, **kwargs):
        pass

    def get_cal_params(self, **kwargs):
        pass

    def compute_range_meter(self, **kwargs):
        """Calculate range in units meter.

        Returns
        -------
        range_meter : xr.DataArray
            range in units meter
        """
        pass

    def _cal_power(self, cal_type, **kwargs):
        """Calibrate power data for EK60, EK80, and AZFP.

        Parameters
        ----------
        cal_type : str
            'Sv' for calculating volume backscattering strength, or
            'Sp' for calculating point backscattering strength
        """
        pass

    def compute__sv(self, **kwargs):
        pass

    def compute__sp(self, **kwargs):
        pass

    def _add_params_to_output(self, ds_out):
        """Add all cal and env parameters to output Sv dataset."""
        # Copy both parameter groups into the dataset with a single loop.
        for params in (self.env_params, self.cal_params):
            for name, value in params.items():
                ds_out[name] = value
        return ds_out
|
# Leetcode 138. Copy List with Random Pointer
#
# Link: https://leetcode.com/problems/copy-list-with-random-pointer/
# Difficulty: Medium
# Complexity:
#   O(N) time | where N represent the number of elements in the linked list
#   O(1) space
"""
# Definition for a Node.
class Node:
    def __init__(self, x: int, next: 'Node' = None, random: 'Node' = None):
        self.val = int(x)
        self.next = next
        self.random = random
"""
class Solution:
    def copyRandomList(self, head: 'Optional[Node]') -> 'Optional[Node]':
        """Deep-copy a list with random pointers in O(1) extra space."""
        if not head:
            return head
        # Pass 1: interleave each node with its copy: A -> A' -> B -> B' ...
        node = head
        while node:
            node.next = Node(node.val, node.next)
            node = node.next.next
        # Pass 2: a copy's random is the copy of the original's random,
        # which sits right after it in the interleaved list.
        node = head
        while node:
            if node.random:
                node.next.random = node.random.next
            node = node.next.next
        # Pass 3: unweave. Bug fix: the original loop stopped one step
        # early and left the last original node's `next` pointing at its
        # clone; this version fully restores the original list.
        copy_head = head.next
        node = head
        while node:
            copy = node.next
            node.next = copy.next                      # restore original link
            copy.next = node.next.next if node.next else None  # chain the copies
            node = node.next
        return copy_head
|
"""
# Definition for a Node.
class Node:
    def __init__(self, x: int, next: 'Node' = None, random: 'Node' = None):
        self.val = int(x)
        self.next = next
        self.random = random
"""
class Solution:
    def copy_random_list(self, head: 'Optional[Node]') -> 'Optional[Node]':
        """Deep-copy a list with random pointers in O(1) extra space."""
        if not head:
            return head
        # Pass 1: interleave copies. Bug fix: `node(node.val, node.next)`
        # called the loop variable as a constructor; the class is Node.
        node = head
        while node:
            node.next = Node(node.val, node.next)
            node = node.next.next
        # Pass 2: wire the copies' random pointers.
        node = head
        while node:
            if node.random:
                node.next.random = node.random.next
            node = node.next.next
        # Pass 3: unweave, fully restoring the original list (the original
        # loop left the last original node pointing at its clone).
        copy_head = head.next
        node = head
        while node:
            copy = node.next
            node.next = copy.next
            copy.next = node.next.next if node.next else None
            node = node.next
        return copy_head
|
def organise(records):
    """Aggregate (person, shop, day) records into {person: {shop: {day: count}}}.

    Records with a falsy person or shop, or a day outside 1..7, are ignored.
    """
    res = {}
    for person, shop, day in records:
        if not (person and shop and 0 < day < 8):
            continue
        per_day = res.setdefault(person, {}).setdefault(shop, {})
        per_day[day] = per_day.get(day, 0) + 1
    return res

assert(organise([]) == {})
assert(organise([('Tom', '', 5), ('Tom', 'Aldi', 4)]) == {'Tom': {'Aldi': {4: 1}}})
assert(organise([('Tom', 'Aldi', 5), ('Tom', 'Aldi', 5)]) == {'Tom': {'Aldi': {5: 2}}})
assert(organise([('Tom', 'Aldi', 1), ('Tom', 'Migros', 4), ('Jack', 'Aldi', 5)]) == {'Jack': {'Aldi': {5: 1}}, 'Tom': {'Aldi': {1: 1}, 'Migros': {4: 1}}})
|
def organise(records):
    """Aggregate (person, shop, day) records into {person: {shop: {day: count}}}."""
    res = {}
    # Keep only records with a non-empty person and shop and a weekday in 1..7.
    valid = (r for r in records if r[0] and r[1] and 0 < r[2] < 8)
    for person, shop, day in valid:
        shops = res.setdefault(person, {})
        days = shops.setdefault(shop, {})
        days[day] = days.get(day, 0) + 1
    return res
assert organise([]) == {}
assert organise([('Tom', '', 5), ('Tom', 'Aldi', 4)]) == {'Tom': {'Aldi': {4: 1}}}
assert organise([('Tom', 'Aldi', 5), ('Tom', 'Aldi', 5)]) == {'Tom': {'Aldi': {5: 2}}}
assert organise([('Tom', 'Aldi', 1), ('Tom', 'Migros', 4), ('Jack', 'Aldi', 5)]) == {'Jack': {'Aldi': {5: 1}}, 'Tom': {'Aldi': {1: 1}, 'Migros': {4: 1}}}
|
"""
70.49%
"""
class Solution(object):
    def isHappy(self, n):
        """
        :type n: int
        :rtype: bool
        """
        # 0 and 1 are reported happy (matches the original behaviour).
        if n in (0, 1):
            return True
        seen = set()
        while n not in seen:
            seen.add(n)
            # Replace n by the sum of the squares of its digits.
            n = sum(int(digit) ** 2 for digit in str(n))
            if n == 1:
                return True
        # A repeated value means we entered a cycle that never reaches 1.
        return False
|
"""
70.49%
"""
class Solution(object):
    def is_happy(self, n):
        """
        :type n: int
        :rtype: bool
        """
        # 0 and 1 are reported happy (matches the original behaviour).
        if n in (0, 1):
            return True
        visited = set()
        value = n
        while value not in visited:
            visited.add(value)
            value = sum(int(ch) * int(ch) for ch in str(value))
            if value == 1:
                return True
        # Revisiting a value means a cycle that never reaches 1.
        return False
|
"""
.. Copyright (c) 2016 Marshall Farrier
   license http://opensource.org/licenses/MIT

Constants for working with an options database
"""
# Logging configuration: record format and relative log-file path.
LOG = {
    'format': "%(asctime)s %(levelname)s %(module)s.%(funcName)s : %(message)s",
    'path': 'mfstockmkt/options/db'
}
# Database name per environment -- presumably database names; TODO confirm against connection code.
DB = {
    'dev': {
        'name': 'test'
    },
    'prod': {
        'name': 'optMkt'
    }
}
# Option-chain columns stored as integers / floats respectively.
INT_COLS = ('Vol', 'Open_Int',)
FLOAT_COLS = ('Last', 'Bid', 'Ask',)
# Maximum retry attempts per environment.
MAX_RETRIES = {
    'dev': 2,
    'prod': 4,
}
|
"""
.. Copyright (c) 2016 Marshall Farrier
   license http://opensource.org/licenses/MIT

Constants for working with an options database
"""
# Logging configuration: record format and relative log-file path.
log = {'format': '%(asctime)s %(levelname)s %(module)s.%(funcName)s : %(message)s', 'path': 'mfstockmkt/options/db'}
# Database name per environment -- presumably database names; TODO confirm against connection code.
db = {'dev': {'name': 'test'}, 'prod': {'name': 'optMkt'}}
# Option-chain columns stored as integers / floats respectively.
int_cols = ('Vol', 'Open_Int')
float_cols = ('Last', 'Bid', 'Ask')
# Maximum retry attempts per environment.
max_retries = {'dev': 2, 'prod': 4}
|
"""
File for global constants used in the program.
"""
# Sensor counts -- presumably per prosthetic-hand model (TASKA vs LUKE); TODO confirm.
nsensors_taska = 5
nsensors_luke = 19
# Active configuration uses the LUKE sensor count.
nsensors = nsensors_luke
nencoded = nsensors_luke
# Features per sensor -- presumably; verify against the feature-extraction code.
nfeat = 1
nelectrodes = 300
|
"""
File for global constants used in the program.
"""
# Sensor counts -- presumably per prosthetic-hand model (TASKA vs LUKE); TODO confirm.
nsensors_taska = 5
nsensors_luke = 19
# Active configuration uses the LUKE sensor count.
nsensors = nsensors_luke
nencoded = nsensors_luke
# Features per sensor -- presumably; verify against the feature-extraction code.
nfeat = 1
nelectrodes = 300
|
'''
Convenience wrappers to make using the conf system as easy and seamless as possible
'''
def integrate(hub, imports, override=None, cli=None, roots=None, home_root=None, loader='json'):
    '''
    Load the conf sub and run the integrate sequence.

    Registers the `pop.mods.conf` sub on the hub, then delegates all
    arguments to `hub.conf.integrate.load`. `imports` names the config
    sections to load; the remaining keyword arguments are passed through
    unchanged.
    '''
    hub.pop.sub.add('pop.mods.conf')
    hub.conf.integrate.load(imports, override, cli=cli, roots=roots, home_root=home_root, loader=loader)
|
"""
Convenience wrappers to make using the conf system as easy and seamless as possible
"""
def integrate(hub, imports, override=None, cli=None, roots=None, home_root=None, loader='json'):
    """
    Load the conf sub and run the integrate sequence.

    Registers the `pop.mods.conf` sub on the hub, then delegates all
    arguments to `hub.conf.integrate.load` unchanged.
    """
    hub.pop.sub.add('pop.mods.conf')
    hub.conf.integrate.load(imports, override, cli=cli, roots=roots, home_root=home_root, loader=loader)
|
#!/usr/bin/env python
# Render the outline of a rectilinear grid dataset (piece 1 of 2).
# Relies on `vtk`, `VTK_DATA_ROOT` and `black` being provided by the VTK
# test harness -- presumably via its standard preamble; TODO confirm.
# create pipeline
#
reader = vtk.vtkDataSetReader()
reader.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/RectGrid2.vtk")
reader.Update()
# here to force exact extent
elev = vtk.vtkElevationFilter()
elev.SetInputConnection(reader.GetOutputPort())
elev.Update()
outline = vtk.vtkRectilinearGridOutlineFilter()
outline.SetInputData(elev.GetRectilinearGridOutput())
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
# Render only piece 1 of a 2-piece decomposition.
outlineMapper.SetNumberOfPieces(2)
outlineMapper.SetPiece(1)
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
outlineActor.GetProperty().SetColor(black)
# Graphics stuff
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.SetMultiSamples(0)
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
#
ren1.AddActor(outlineActor)
ren1.SetBackground(1,1,1)
renWin.SetSize(400,400)
# Fixed camera so the regression image is reproducible.
cam1 = ren1.GetActiveCamera()
cam1.SetClippingRange(3.76213,10.712)
cam1.SetFocalPoint(-0.0842503,-0.136905,0.610234)
cam1.SetPosition(2.53813,2.2678,-5.22172)
cam1.SetViewUp(-0.241047,0.930635,0.275343)
iren.Initialize()
# render the image
#
# prevent the tk window from showing up then start the event loop
# --- end of script --
|
# Render the outline of a rectilinear grid dataset (piece 1 of 2).
# Bug fix: this copy assigned snake_case names (outline_mapper, outline_actor,
# ren_win) but then used the camelCase names, raising NameError; all
# references below now use the snake_case names consistently.
reader = vtk.vtkDataSetReader()
reader.SetFileName('' + str(VTK_DATA_ROOT) + '/Data/RectGrid2.vtk')
reader.Update()
elev = vtk.vtkElevationFilter()
elev.SetInputConnection(reader.GetOutputPort())
elev.Update()
outline = vtk.vtkRectilinearGridOutlineFilter()
outline.SetInputData(elev.GetRectilinearGridOutput())
outline_mapper = vtk.vtkPolyDataMapper()
outline_mapper.SetInputConnection(outline.GetOutputPort())
# Render only piece 1 of a 2-piece decomposition.
outline_mapper.SetNumberOfPieces(2)
outline_mapper.SetPiece(1)
outline_actor = vtk.vtkActor()
outline_actor.SetMapper(outline_mapper)
outline_actor.GetProperty().SetColor(black)
ren1 = vtk.vtkRenderer()
ren_win = vtk.vtkRenderWindow()
ren_win.SetMultiSamples(0)
ren_win.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(ren_win)
ren1.AddActor(outline_actor)
ren1.SetBackground(1, 1, 1)
ren_win.SetSize(400, 400)
# Fixed camera so the regression image is reproducible.
cam1 = ren1.GetActiveCamera()
cam1.SetClippingRange(3.76213, 10.712)
cam1.SetFocalPoint(-0.0842503, -0.136905, 0.610234)
cam1.SetPosition(2.53813, 2.2678, -5.22172)
cam1.SetViewUp(-0.241047, 0.930635, 0.275343)
iren.Initialize()
|
# Find the 10001st prime using the Sieve of Eratosthenes
def sieve(n):
    """Return a boolean list where index i is True iff i is prime (0..n)."""
    flags = [True] * (n + 1)
    flags[0] = flags[1] = False
    for candidate in range(2, n + 1):
        if flags[candidate]:
            # Cross off every multiple starting from candidate**2.
            for multiple in range(candidate * candidate, n + 1, candidate):
                flags[multiple] = False
    return flags

prime_sieve = sieve(400000)
primes = [idx for idx, is_prime in enumerate(prime_sieve) if is_prime]
print(primes[10000])
|
def sieve(n):
    """Return a boolean list where index i is True iff i is prime (0..n)."""
    marks = [True] * (n + 1)
    marks[0] = marks[1] = False
    i = 2
    while i <= n:
        if marks[i]:
            # Composites below i*i were already crossed off by smaller primes.
            for j in range(i * i, n + 1, i):
                marks[j] = False
        i += 1
    return marks
prime_sieve = sieve(400000)
primes = [i for i, flag in enumerate(prime_sieve) if flag]
print(primes[10000])
|
class Node:
    """A single linked-list node holding `data` and a `next` pointer."""
    # Constructor to initialize the node object
    def __init__(self, data):
        self.data = data
        self.next = None
class LinkedList:
    """Singly linked list with Floyd cycle detection."""

    def __init__(self):
        self.head = None

    def push(self, new_data):
        """Insert a new node holding *new_data* at the head of the list."""
        node = Node(new_data)
        node.next = self.head
        self.head = node

    def printList(self):
        """Print every node's data (does not terminate on a cyclic list)."""
        current = self.head
        while current:
            print(current.data)
            current = current.next

    def detectLoop(self):
        """Print whether the list contains a cycle (Floyd's tortoise/hare)."""
        slow = self.head
        fast = self.head
        while slow and fast and fast.next:
            slow = slow.next
            fast = fast.next.next
            if slow == fast:
                print("Found Loop")
                return
        print("Not Found Loop")
# Driver program for testing
llist = LinkedList()
llist.push(20)
llist.push(4)
llist.push(15)
llist.push(10)
# Create a loop for testing: last node points back to the head,
# so detectLoop should report "Found Loop".
llist.head.next.next.next.next = llist.head
llist.detectLoop()
|
class Node:
    """A single linked-list node holding `data` and a `next` pointer."""
    def __init__(self, data):
        self.data = data
        self.next = None
class Linkedlist:
    """Singly linked list with Floyd cycle detection."""

    def __init__(self):
        self.head = None

    def push(self, new_data):
        """Insert a new node holding *new_data* at the head of the list."""
        # Bug fix: the original called `node(new_data)` -- an undefined
        # lowercase name; the constructor is the Node class.
        new_node = Node(new_data)
        new_node.next = self.head
        self.head = new_node

    def print_list(self):
        """Print every node's data (does not terminate on a cyclic list)."""
        temp = self.head
        while temp:
            print(temp.data)
            temp = temp.next

    def detect_loop(self):
        """Print whether the list contains a cycle (Floyd's tortoise/hare)."""
        slow_p = self.head
        fast_p = self.head
        while slow_p and fast_p and fast_p.next:
            slow_p = slow_p.next
            fast_p = fast_p.next.next
            if slow_p == fast_p:
                print('Found Loop')
                return
        print('Not Found Loop')
# Driver program for testing the snake_case linked list.
# Bug fixes: the class is named Linkedlist (not linked_list) and the
# cycle-detection method is detect_loop (not detectLoop).
llist = Linkedlist()
llist.push(20)
llist.push(4)
llist.push(15)
llist.push(10)
# Create a loop for testing: last node points back to the head.
llist.head.next.next.next.next = llist.head
llist.detect_loop()
|
def bubble(alist):
    """Sort *alist* in place (ascending) using bubble sort."""
    for first in range(len(alist)-1,0,-1):
        for sec in range(first):
            if alist[sec] > alist[sec+1]:
                # Idiomatic tuple swap instead of a temp variable.
                alist[sec], alist[sec+1] = alist[sec+1], alist[sec]
def test_bubble():
    """Sanity-check bubble() on a small unsorted list."""
    alist = [1,7,2,5,9,12,5]
    bubble(alist)
    assert alist[0] == 1
    assert alist[1] == 2
    assert alist[6] == 12
    assert alist[5] == 9
def short_bubble(alist):
    """Sort *alist* in place; stop early once a pass makes no exchange."""
    exchange = True
    first = len(alist) - 1
    while first > 0 and exchange:
        exchange = False
        for sec in range(first):
            if alist[sec] > alist[sec+1]:
                exchange = True
                # Idiomatic tuple swap instead of a temp variable.
                alist[sec], alist[sec+1] = alist[sec+1], alist[sec]
        first -= 1
def test_short_bubble():
    """Sanity-check short_bubble() on a small unsorted list."""
    alist = [1,4,5,2,19,3,7]
    short_bubble(alist)
    assert alist[0] == 1
    assert alist[1] == 2
    assert alist[6] == 19
    assert alist[5] == 7
|
def bubble(alist):
    """Sort *alist* in place (ascending) using bubble sort."""
    for first in range(len(alist) - 1, 0, -1):
        for sec in range(first):
            if alist[sec] > alist[sec + 1]:
                # Idiomatic tuple swap instead of a temp variable.
                alist[sec], alist[sec + 1] = alist[sec + 1], alist[sec]
def test_bubble():
    """Sanity-check bubble() on a small unsorted list."""
    alist = [1, 7, 2, 5, 9, 12, 5]
    bubble(alist)
    assert alist[0] == 1
    assert alist[1] == 2
    assert alist[6] == 12
    assert alist[5] == 9
def short_bubble(alist):
    """Sort *alist* in place; stop early once a pass makes no exchange."""
    exchange = True
    first = len(alist) - 1
    while first > 0 and exchange:
        exchange = False
        for sec in range(first):
            if alist[sec] > alist[sec + 1]:
                exchange = True
                # Idiomatic tuple swap instead of a temp variable.
                alist[sec], alist[sec + 1] = alist[sec + 1], alist[sec]
        first -= 1
def test_short_bubble():
    """Sanity-check short_bubble() on a small unsorted list."""
    alist = [1, 4, 5, 2, 19, 3, 7]
    short_bubble(alist)
    assert alist[0] == 1
    assert alist[1] == 2
    assert alist[6] == 19
    assert alist[5] == 7
|
"""
>>> from datetime import datetime, timedelta
>>> from django.utils.timesince import timesince
>>> t = datetime(2007, 8, 14, 13, 46, 0)
>>> onemicrosecond = timedelta(microseconds=1)
>>> onesecond = timedelta(seconds=1)
>>> oneminute = timedelta(minutes=1)
>>> onehour = timedelta(hours=1)
>>> oneday = timedelta(days=1)
>>> oneweek = timedelta(days=7)
>>> onemonth = timedelta(days=30)
>>> oneyear = timedelta(days=365)
# equal datetimes.
>>> timesince(t, t)
u'0 minutes'
# Microseconds and seconds are ignored.
>>> timesince(t, t+onemicrosecond)
u'0 minutes'
>>> timesince(t, t+onesecond)
u'0 minutes'
# Test other units.
>>> timesince(t, t+oneminute)
u'1 minute'
>>> timesince(t, t+onehour)
u'1 hour'
>>> timesince(t, t+oneday)
u'1 day'
>>> timesince(t, t+oneweek)
u'1 week'
>>> timesince(t, t+onemonth)
u'1 month'
>>> timesince(t, t+oneyear)
u'1 year'
# Test multiple units.
>>> timesince(t, t+2*oneday+6*onehour)
u'2 days, 6 hours'
>>> timesince(t, t+2*oneweek+2*oneday)
u'2 weeks, 2 days'
# If the two differing units aren't adjacent, only the first unit is displayed.
>>> timesince(t, t+2*oneweek+3*onehour+4*oneminute)
u'2 weeks'
>>> timesince(t, t+4*oneday+5*oneminute)
u'4 days'
# When the second date occurs before the first, we should always get 0 minutes.
>>> timesince(t, t-onemicrosecond)
u'0 minutes'
>>> timesince(t, t-onesecond)
u'0 minutes'
>>> timesince(t, t-oneminute)
u'0 minutes'
>>> timesince(t, t-onehour)
u'0 minutes'
>>> timesince(t, t-oneday)
u'0 minutes'
>>> timesince(t, t-oneweek)
u'0 minutes'
>>> timesince(t, t-onemonth)
u'0 minutes'
>>> timesince(t, t-oneyear)
u'0 minutes'
>>> timesince(t, t-2*oneday-6*onehour)
u'0 minutes'
>>> timesince(t, t-2*oneweek-2*oneday)
u'0 minutes'
>>> timesince(t, t-2*oneweek-3*onehour-4*oneminute)
u'0 minutes'
>>> timesince(t, t-4*oneday-5*oneminute)
u'0 minutes'
"""
|
"""
>>> from datetime import datetime, timedelta
>>> from django.utils.timesince import timesince
>>> t = datetime(2007, 8, 14, 13, 46, 0)
>>> onemicrosecond = timedelta(microseconds=1)
>>> onesecond = timedelta(seconds=1)
>>> oneminute = timedelta(minutes=1)
>>> onehour = timedelta(hours=1)
>>> oneday = timedelta(days=1)
>>> oneweek = timedelta(days=7)
>>> onemonth = timedelta(days=30)
>>> oneyear = timedelta(days=365)
# equal datetimes.
>>> timesince(t, t)
u'0 minutes'
# Microseconds and seconds are ignored.
>>> timesince(t, t+onemicrosecond)
u'0 minutes'
>>> timesince(t, t+onesecond)
u'0 minutes'
# Test other units.
>>> timesince(t, t+oneminute)
u'1 minute'
>>> timesince(t, t+onehour)
u'1 hour'
>>> timesince(t, t+oneday)
u'1 day'
>>> timesince(t, t+oneweek)
u'1 week'
>>> timesince(t, t+onemonth)
u'1 month'
>>> timesince(t, t+oneyear)
u'1 year'
# Test multiple units.
>>> timesince(t, t+2*oneday+6*onehour)
u'2 days, 6 hours'
>>> timesince(t, t+2*oneweek+2*oneday)
u'2 weeks, 2 days'
# If the two differing units aren't adjacent, only the first unit is displayed.
>>> timesince(t, t+2*oneweek+3*onehour+4*oneminute)
u'2 weeks'
>>> timesince(t, t+4*oneday+5*oneminute)
u'4 days'
# When the second date occurs before the first, we should always get 0 minutes.
>>> timesince(t, t-onemicrosecond)
u'0 minutes'
>>> timesince(t, t-onesecond)
u'0 minutes'
>>> timesince(t, t-oneminute)
u'0 minutes'
>>> timesince(t, t-onehour)
u'0 minutes'
>>> timesince(t, t-oneday)
u'0 minutes'
>>> timesince(t, t-oneweek)
u'0 minutes'
>>> timesince(t, t-onemonth)
u'0 minutes'
>>> timesince(t, t-oneyear)
u'0 minutes'
>>> timesince(t, t-2*oneday-6*onehour)
u'0 minutes'
>>> timesince(t, t-2*oneweek-2*oneday)
u'0 minutes'
>>> timesince(t, t-2*oneweek-3*onehour-4*oneminute)
u'0 minutes'
>>> timesince(t, t-4*oneday-5*oneminute)
u'0 minutes'
"""
|
#
# PySNMP MIB module TPLINK-ETHERNETOAM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TPLINK-ETHERNETOAM-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:17:08 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE: auto-generated code; manual edits are normally lost on regeneration.
# Pull SMI base types and macros out of the already-loaded MIB modules.
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ModuleIdentity, Gauge32, IpAddress, MibIdentifier, Integer32, Bits, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, Counter64, TimeTicks, iso, Counter32, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "Gauge32", "IpAddress", "MibIdentifier", "Integer32", "Bits", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "Counter64", "TimeTicks", "iso", "Counter32", "Unsigned32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
tplinkMgmt, = mibBuilder.importSymbols("TPLINK-MIB", "tplinkMgmt")
# Module identity rooted under the TP-Link enterprise OID (1.3.6.1.4.1.11863).
tplinkEthernetOam = ModuleIdentity((1, 3, 6, 1, 4, 1, 11863, 6, 60))
tplinkEthernetOam.setRevisions(('2015-07-06 10:30',))
if mibBuilder.loadTexts: tplinkEthernetOam.setLastUpdated('201507061030Z')
if mibBuilder.loadTexts: tplinkEthernetOam.setOrganization('TPLINK')
# Sub-tree identifiers for the Ethernet OAM object groups.
tplinkEthernetOamMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1))
tplinkEthernetOamMIBNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 2))
ethernetOamBasicConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 1))
ethernetOamLinkMonConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 2))
ethernetOamRfiConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 3))
ethernetOamRmtLbConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 4))
ethernetOamDiscoveryInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 5))
ethernetOamStatistics = MibIdentifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 6))
ethernetOamEventLog = MibIdentifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 7))
mibBuilder.exportSymbols("TPLINK-ETHERNETOAM-MIB", ethernetOamBasicConfig=ethernetOamBasicConfig, ethernetOamStatistics=ethernetOamStatistics, ethernetOamDiscoveryInfo=ethernetOamDiscoveryInfo, ethernetOamLinkMonConfig=ethernetOamLinkMonConfig, ethernetOamRfiConfig=ethernetOamRfiConfig, ethernetOamEventLog=ethernetOamEventLog, tplinkEthernetOamMIBObjects=tplinkEthernetOamMIBObjects, PYSNMP_MODULE_ID=tplinkEthernetOam, ethernetOamRmtLbConfig=ethernetOamRmtLbConfig, tplinkEthernetOamMIBNotifications=tplinkEthernetOamMIBNotifications, tplinkEthernetOam=tplinkEthernetOam)
|
# Snake_case variant of the TPLINK-ETHERNETOAM-MIB module.
# Bug fix: the original mixed naming schemes -- it defined snake_case
# objects (tplink_ethernet_oam, ethernet_oam_*) but then called methods
# on / exported the camelCase names, so the objects defined here were
# never configured or exported. All references below use the snake_case
# names this section defines; exported symbol names (keyword keys) keep
# their canonical MIB spelling.
(object_identifier, integer, octet_string) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'Integer', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_range_constraint, single_value_constraint, value_size_constraint, constraints_intersection, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueRangeConstraint', 'SingleValueConstraint', 'ValueSizeConstraint', 'ConstraintsIntersection', 'ConstraintsUnion')
(notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance')
(module_identity, gauge32, ip_address, mib_identifier, integer32, bits, object_identity, mib_scalar, mib_table, mib_table_row, mib_table_column, notification_type, counter64, time_ticks, iso, counter32, unsigned32) = mibBuilder.importSymbols('SNMPv2-SMI', 'ModuleIdentity', 'Gauge32', 'IpAddress', 'MibIdentifier', 'Integer32', 'Bits', 'ObjectIdentity', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'NotificationType', 'Counter64', 'TimeTicks', 'iso', 'Counter32', 'Unsigned32')
(textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString')
(tplink_mgmt,) = mibBuilder.importSymbols('TPLINK-MIB', 'tplinkMgmt')
tplink_ethernet_oam = module_identity((1, 3, 6, 1, 4, 1, 11863, 6, 60))
tplink_ethernet_oam.setRevisions(('2015-07-06 10:30',))
if mibBuilder.loadTexts:
    tplink_ethernet_oam.setLastUpdated('201507061030Z')
if mibBuilder.loadTexts:
    tplink_ethernet_oam.setOrganization('TPLINK')
tplink_ethernet_oam_mib_objects = mib_identifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1))
tplink_ethernet_oam_mib_notifications = mib_identifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 2))
ethernet_oam_basic_config = mib_identifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 1))
ethernet_oam_link_mon_config = mib_identifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 2))
ethernet_oam_rfi_config = mib_identifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 3))
ethernet_oam_rmt_lb_config = mib_identifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 4))
ethernet_oam_discovery_info = mib_identifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 5))
ethernet_oam_statistics = mib_identifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 6))
ethernet_oam_event_log = mib_identifier((1, 3, 6, 1, 4, 1, 11863, 6, 60, 1, 7))
mibBuilder.exportSymbols('TPLINK-ETHERNETOAM-MIB', ethernetOamBasicConfig=ethernet_oam_basic_config, ethernetOamStatistics=ethernet_oam_statistics, ethernetOamDiscoveryInfo=ethernet_oam_discovery_info, ethernetOamLinkMonConfig=ethernet_oam_link_mon_config, ethernetOamRfiConfig=ethernet_oam_rfi_config, ethernetOamEventLog=ethernet_oam_event_log, tplinkEthernetOamMIBObjects=tplink_ethernet_oam_mib_objects, PYSNMP_MODULE_ID=tplink_ethernet_oam, ethernetOamRmtLbConfig=ethernet_oam_rmt_lb_config, tplinkEthernetOamMIBNotifications=tplink_ethernet_oam_mib_notifications, tplinkEthernetOam=tplink_ethernet_oam)
|
class ParticleInstanceModifier:
    """Attribute container mirroring a particle-instance modifier.

    All fields default to None -- presumably populated elsewhere from the
    host application's modifier data; TODO confirm the producer.
    """
    axis = None
    object = None
    particle_system_index = None
    position = None
    random_position = None
    show_alive = None
    show_dead = None
    show_unborn = None
    use_children = None
    use_normal = None
    use_path = None
    use_preserve_shape = None
    use_size = None
|
class Particleinstancemodifier:
    """Attribute container mirroring a particle-instance modifier.

    All fields default to None -- presumably populated elsewhere from the
    host application's modifier data; TODO confirm the producer.
    """
    axis = None
    object = None
    particle_system_index = None
    position = None
    random_position = None
    show_alive = None
    show_dead = None
    show_unborn = None
    use_children = None
    use_normal = None
    use_path = None
    use_preserve_shape = None
    use_size = None
|
def FlagsForFile(filename, **kwargs):
    """Return YouCompleteMe compile flags (MSVC 2017 / Win10 SDK include paths)."""
    include_dirs = [
        'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Tools/MSVC/14.13.26128/include',
        'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Tools/MSVC/14.13.26128/atlmfc/include',
        'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/VS/include',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/ucrt',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/um',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/shared',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/winrt',
        'C:/Program Files (x86)/Windows Kits/NETFXSDK/4.6.1/Include/um',
    ]
    flags = ['-x', 'c++', '-std=c++14', '-Wall', '-Wextra', '-Werror']
    for directory in include_dirs:
        flags.extend(['-I', directory])
    return {'flags': flags}
|
def flags_for_file(filename, **kwargs):
    """Return YouCompleteMe compile flags (MSVC 2017 / Win10 SDK include paths)."""
    base = ['-x', 'c++', '-std=c++14', '-Wall', '-Wextra', '-Werror']
    includes = [
        'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Tools/MSVC/14.13.26128/include',
        'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Tools/MSVC/14.13.26128/atlmfc/include',
        'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community/VC/Auxiliary/VS/include',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/ucrt',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/um',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/shared',
        'C:/Program Files (x86)/Windows Kits/10/Include/10.0.16299.0/winrt',
        'C:/Program Files (x86)/Windows Kits/NETFXSDK/4.6.1/Include/um',
    ]
    for path in includes:
        base += ['-I', path]
    return {'flags': base}
|
# Interactive factorial: prints the expansion (e.g. "5 x 4 x ... x 1 = 120").
n = int(input('Digite um valor para calcular seu fatorial: '))
f = 1
print(f'Calculando {n}! = ', end=' ')
while n > 0:
    print(f'{n}', end=' ')
    # " x " between factors, " = " before the final result.
    print(' x ' if n > 1 else ' = ', end=' ')
    f *= n
    n -= 1
print(f)
|
# Interactive factorial: prints the expansion (e.g. "5 x 4 x ... x 1 = 120").
n = int(input('Digite um valor para calcular seu fatorial: '))
f = 1
print(f'Calculando {n}! = ', end=' ')
while n > 0:
    print(f'{n}', end=' ')
    separador = ' x ' if n > 1 else ' = '
    print(separador, end=' ')
    f *= n
    n -= 1
print(f)
|
class TestTable:
    """Row object mirroring the TestTable relation (id, code)."""
    def __init__(self):
        # Integer primary key (see DDLCOMMAND below).
        self.id = None
        # Integer payload column.
        self.code = None
# SQL used to create the backing table for TestTable rows.
DDLCOMMAND = """
 CREATE TABLE TestTable (
   id   INTEGER CONSTRAINT pk_role PRIMARY KEY,
   code INTEGER
 );
 """
|
class Testtable:
    """Row object mirroring the TestTable relation (id, code)."""
    def __init__(self):
        # Integer primary key (see ddlcommand below).
        self.id = None
        # Integer payload column.
        self.code = None
# SQL used to create the backing table for Testtable rows.
ddlcommand = '\n CREATE TABLE TestTable (\n   id   INTEGER CONSTRAINT pk_role PRIMARY KEY,\n   code INTEGER\n );\n '
|
def most_common(lst):
    """Return the most frequent element of *lst*.

    Ties break toward the element appearing earliest in *lst*; raises
    ValueError on an empty list (same as the original max()-based version).
    """
    from collections import Counter
    # Count once: O(n) instead of the original O(n^2) `key=lst.count`.
    counts = Counter(lst)
    return max(lst, key=counts.__getitem__)
|
def most_common(lst):
    """Return the most frequent element of *lst*.

    Ties break toward the element appearing earliest in *lst*; raises
    ValueError on an empty list (same as the original max()-based version).
    """
    from collections import Counter
    # Count once: O(n) instead of the original O(n^2) `key=lst.count`.
    counts = Counter(lst)
    return max(lst, key=counts.__getitem__)
|
# https://leetcode.com/problems/defanging-an-ip-address
class Solution:
    def defangIPaddr(self, address):
        """Return *address* with every '.' replaced by '[.]'."""
        if not address:
            return ""
        return address.replace(".", "[.]")
|
class Solution:
    def defang_i_paddr(self, address):
        """Return *address* with every '.' replaced by '[.]'."""
        if not address:
            return ''
        return '[.]'.join(part for part in address.split('.'))
|
# Board cell markers.
BOT = "b"
EMPTY = "-"
DIRT = "d"

def dist(pos1, pos2):
    """Manhattan distance between two (row, col) positions."""
    row_gap = abs(pos1[0] - pos2[0])
    col_gap = abs(pos1[1] - pos2[1])
    return row_gap + col_gap

def is_dirt(pos, board):
    """True iff *pos* lies on *board* and that cell contains dirt."""
    if min(pos) < 0:
        return False
    if pos[0] >= len(board) or pos[1] >= len(board[0]):
        return False
    return board[pos[0]][pos[1]] == DIRT
def find_closest_dirt(pos, board, max_distance=0):
    """Return the closest dirt cell to *pos*, or None if none is found.

    Walks diamond-shaped rings of increasing Manhattan radius around *pos*,
    checking each candidate with is_dirt(). Returns None implicitly when no
    dirt lies within max_distance (default: board height + width).
    """
    if not max_distance:
        max_distance = len(board) + len(board[0])
    n_moves = 1
    # Start each ring directly below pos at distance n_moves.
    start = (pos[0] + n_moves, pos[1])
    curr = list(start)
    # Step diagonally; the signs flip when an axis of pos is crossed so the
    # walk traces the full diamond. NOTE(review): ring traversal relies on
    # these sign flips at the axes -- verify corner cases near the board edge.
    dx, dy = -1, 1
    while n_moves <= max_distance:
        if is_dirt(curr, board):
            return curr
        curr[0] += dx
        curr[1] += dy
        if curr[0] == pos[0]:
            dy = -dy
        if curr[1] == pos[1]:
            dx = -dx
        # Back at the starting cell: the ring is exhausted, grow the radius.
        if curr == list(start):
            n_moves += 1
            start = (pos[0] + n_moves, pos[1])
            curr = list(start)
def _next_move(pos, board):
    """Return 'CLEAN' when standing on dirt, else a step toward the closest dirt."""
    if board[pos[0]][pos[1]] == DIRT:
        return "CLEAN"
    target = find_closest_dirt(pos, board)
    row_delta = target[0] - pos[0]
    col_delta = target[1] - pos[1]
    # Move vertically first; only move horizontally when already on the row.
    if row_delta:
        return "DOWN" if row_delta > 0 else "UP"
    return "RIGHT" if col_delta > 0 else "LEFT"
|
# Board cell markers (snake_case section).
bot = 'b'
empty = '-'
dirt = 'd'

def dist(pos1, pos2):
    """Manhattan distance between two (row, col) positions."""
    return abs(pos1[0] - pos2[0]) + abs(pos1[1] - pos2[1])

def is_dirt(pos, board):
    """True iff *pos* lies on *board* and that cell contains dirt."""
    if min(pos) < 0:
        return False
    if pos[0] >= len(board):
        return False
    if pos[1] >= len(board[0]):
        return False
    # Consistency fix: this section defines the lowercase `dirt` constant
    # but compared against the other section's `DIRT`.
    return board[pos[0]][pos[1]] == dirt
def find_closest_dirt(pos, board, max_distance=0):
    """Return the closest dirt cell to *pos*, or None if none is found.

    Walks diamond-shaped rings of increasing Manhattan radius around *pos*,
    checking each candidate with is_dirt(). Returns None implicitly when no
    dirt lies within max_distance (default: board height + width).
    """
    if not max_distance:
        max_distance = len(board) + len(board[0])
    n_moves = 1
    # Start each ring directly below pos at distance n_moves.
    start = (pos[0] + n_moves, pos[1])
    curr = list(start)
    # Diagonal step; signs flip when an axis of pos is crossed so the walk
    # traces the full diamond. NOTE(review): verify edge cases near borders.
    (dx, dy) = (-1, 1)
    while n_moves <= max_distance:
        if is_dirt(curr, board):
            return curr
        curr[0] += dx
        curr[1] += dy
        if curr[0] == pos[0]:
            dy = -dy
        if curr[1] == pos[1]:
            dx = -dx
        # Back at the starting cell: the ring is exhausted, grow the radius.
        if curr == list(start):
            n_moves += 1
            start = (pos[0] + n_moves, pos[1])
            curr = list(start)
def _next_move(pos, board):
    """Return 'CLEAN' when standing on dirt, else a step toward the closest dirt."""
    # Consistency fix: use this section's lowercase `dirt` constant instead
    # of the other section's `DIRT`.
    if board[pos[0]][pos[1]] == dirt:
        return 'CLEAN'
    closest = find_closest_dirt(pos, board)
    vector = [closest[0] - pos[0], closest[1] - pos[1]]
    # Move vertically first; only move horizontally when already on the row.
    if vector[0]:
        return 'DOWN' if vector[0] > 0 else 'UP'
    return 'RIGHT' if vector[1] > 0 else 'LEFT'
|
# Pizza sizes.
SMALL = 1
MEDIUM = 2
LARGE = 3
# (value, label) choices for the order-size field.
ORDER_SIZE = (
    (SMALL, u'Small'),
    (MEDIUM, u'Medium'),
    (LARGE, u'Large')
)
# Menu items.
MARGARITA = 1
MARINARA = 2
SALAMI = 3
# (value, label) choices for the order-title field.
ORDER_TITLE = (
    (1, u'margarita'),
    (2, u'marinara'),
    (3, u'salami')
)
# Order lifecycle states.
RECEIVED = 1
IN_PROCESS = 2
OUT_FOR_DELIVERY = 3
DELIVERED = 4
RETURNED = 5
# (value, label) choices for the order-status field.
ORDER_STATUS = (
    (RECEIVED, u'Received'),
    (IN_PROCESS, u'In Process'),
    (OUT_FOR_DELIVERY, u'Out For Delivery'),
    (DELIVERED, u'Delivered'),
    (RETURNED, u'Returned')
)
|
# Pizza sizes (snake_case section).
small = 1
medium = 2
large = 3
# Consistency fix: this section defines lowercase constants but referenced
# the other section's uppercase names; use the local definitions.
order_size = ((small, u'Small'), (medium, u'Medium'), (large, u'Large'))
# Menu items.
margarita = 1
marinara = 2
salami = 3
order_title = ((1, u'margarita'), (2, u'marinara'), (3, u'salami'))
# Order lifecycle states.
received = 1
in_process = 2
out_for_delivery = 3
delivered = 4
returned = 5
order_status = ((received, u'Received'), (in_process, u'In Process'), (out_for_delivery, u'Out For Delivery'), (delivered, u'Delivered'), (returned, u'Returned'))
|
# I decided to write a code that generates data filtering object from a list of keyword parameters:
class Filter:
"""
Helper filter class. Accepts a list of single-argument
functions that return True if object in list conforms to some criteria
"""
def __init__(self, functions):
self.functions = functions
def apply(self, data):
return [
item for item in data
if all(i(item) for i in self.functions)
]
# example of usage:
# positive_even = Filter(lamba a: a % 2 == 0, lambda a: a > 0, lambda a: isinstance(int, a)))
# positive_even.apply(range(100)) should return only even numbers from 0 to 99
def make_filter(**keywords):
    """
    Generate filter object for specified keywords.

    Each keyword produces a predicate that accepts a mapping whose
    value under `key` equals `value`.
    """
    filter_funcs = []
    for key, value in keywords.items():
        # BUG FIXES:
        # * the inner function's parameter shadowed `value`, so it
        #   compared item[key] against the item itself (always False);
        # * `key`/`value` were captured late-bound, so every predicate
        #   tested only the last keyword pair -- bind them as defaults;
        # * a missing key raised KeyError -- dict.get makes it a miss.
        def keyword_filter_func(item, key=key, value=value):
            return item.get(key) == value
        filter_funcs.append(keyword_filter_func)
    return Filter(filter_funcs)
# Two heterogeneous sample records used to exercise make_filter():
# only the second one has type 'bird' and name 'polly'.
sample_data = [
    {
        "name": "Bill",
        "last_name": "Gilbert",
        "occupation": "was here",
        "type": "person",
    },
    {
        "is_dead": True,
        "kind": "parrot",
        "type": "bird",
        "name": "polly"
    }
]
# make_filter(name='polly', type='bird').apply(sample_data) should return only second entry from the list
# There are multiple bugs in this code. Find them all and write tests for faulty cases.
|
class Filter:
    """Applies a list of single-argument predicate functions to a
    sequence, keeping only the items that every predicate accepts."""

    def __init__(self, functions):
        self.functions = functions

    def apply(self, data):
        def accepted(item):
            # An item survives only if no predicate rejects it.
            return all(fn(item) for fn in self.functions)
        return [element for element in data if accepted(element)]
def make_filter(**keywords):
    """
    Generate filter object for specified keywords.

    Each keyword produces a predicate that accepts a mapping whose
    value under `key` equals `value`.
    """
    filter_funcs = []
    for key, value in keywords.items():
        # BUG FIXES:
        # * the inner function's parameter shadowed `value`, comparing
        #   item[key] against the item itself;
        # * `key`/`value` were late-bound, so every predicate tested
        #   only the last keyword pair -- bind them as defaults;
        # * a missing key raised KeyError -- dict.get makes it a miss;
        # * the builtin `filter` was returned instead of the Filter class.
        def keyword_filter_func(item, key=key, value=value):
            return item.get(key) == value
        filter_funcs.append(keyword_filter_func)
    return Filter(filter_funcs)
sample_data = [{'name': 'Bill', 'last_name': 'Gilbert', 'occupation': 'was here', 'type': 'person'}, {'is_dead': True, 'kind': 'parrot', 'type': 'bird', 'name': 'polly'}]
|
def fatorial(n):
    """Return n! computed iteratively; any n below 1 yields 1."""
    produto = 1
    for fator in range(n, 1, -1):
        produto *= fator
    return produto
# Read an integer from the user and display its factorial.
digit = int(input('numero para mostra o fatorial '))
print(fatorial(digit))
|
def fatorial(n):
    """Return the factorial of n recursively; values below 1 yield 1."""
    if n < 1:
        return 1
    return n * fatorial(n - 1)
# Read an integer from the user and display its factorial.
digit = int(input('numero para mostra o fatorial '))
print(fatorial(digit))
|
# Time: O(n^2)
# Space: O(n)
class Solution(object):
    def minSkips(self, dist, speed, hoursBefore):
        """
        Return the minimum number of rest skips needed to travel all
        roads in `dist` at `speed` within `hoursBefore` hours, or -1.

        :type dist: List[int]
        :type speed: int
        :type hoursBefore: int
        :rtype: int
        """
        def ceil(a, b):
            # Integer ceiling of a/b without floats.
            return (a+b-1)//b

        # dp[j]: (min arrival time using j skips) * speed, kept as an
        # integer to avoid float error.
        dp = [0]*((len(dist)-1)+1)
        for i, d in enumerate(dist):
            # BUG FIX: xrange is Python 2 only; range behaves the same here.
            for j in reversed(range(len(dp))):
                # No skip after this road: round up to the next whole hour
                # (no rounding is needed after the final road).
                dp[j] = ceil(dp[j]+d, speed)*speed if i < len(dist)-1 else dp[j]+d
                if j-1 >= 0:
                    # Spend one more skip: carry the exact time forward.
                    dp[j] = min(dp[j], dp[j-1]+d)
        target = hoursBefore*speed
        for i in range(len(dist)):
            if dp[i] <= target:
                return i
        return -1
|
class Solution(object):
    def min_skips(self, dist, speed, hoursBefore):
        """
        Return the minimum number of rest skips needed to travel all
        roads in `dist` at `speed` within `hoursBefore` hours, or -1.

        :type dist: List[int]
        :type speed: int
        :type hoursBefore: int
        :rtype: int
        """
        def ceil(a, b):
            # Integer ceiling of a/b without floats.
            return (a + b - 1) // b

        # dp[j] holds (minimum arrival time with j skips) scaled by speed.
        dp = [0] * (len(dist) - 1 + 1)
        for i, d in enumerate(dist):
            # BUG FIX: xrange does not exist in Python 3; use range.
            for j in reversed(range(len(dp))):
                # Without a skip, wait for the next whole hour (except
                # after the final road).
                dp[j] = ceil(dp[j] + d, speed) * speed if i < len(dist) - 1 else dp[j] + d
                if j - 1 >= 0:
                    # With one more skip, carry the exact time forward.
                    dp[j] = min(dp[j], dp[j - 1] + d)
        target = hoursBefore * speed
        for i in range(len(dist)):
            if dp[i] <= target:
                return i
        return -1
|
#!/usr/bin/python3
# Demonstrates list.sort() (in place) versus sorted() (returns a copy).
cars = ['bmw', 'audi', 'toyota', 'subaru']
cars.sort()
print(cars)
cars.sort(reverse=True)
print(cars)
# sorted() does not modify the list itself (it stays reverse-sorted here).
print("\nHere is the sorted list:")
print(sorted(cars))
print("\nHere is the sorted list(reverse=True):")
print(sorted(cars, reverse = True))
print("\nHere is the original list:")
print(cars)
print(len(cars))
|
# Demonstrates list.sort() (in place) versus sorted() (returns a copy).
cars = ['bmw', 'audi', 'toyota', 'subaru']
cars.sort()
print(cars)
cars.sort(reverse=True)
print(cars)
# sorted() does not modify the list itself (it stays reverse-sorted here).
print('\nHere is the sorted list:')
print(sorted(cars))
print('\nHere is the sorted list(reverse=True):')
print(sorted(cars, reverse=True))
print('\nHere is the original list:')
print(cars)
print(len(cars))
|
#
# PySNMP MIB module CISCO-CBP-TARGET-TC-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-CBP-TARGET-TC-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:52:40 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE: auto-generated pysnmp/pysmi module (see header above); regenerate
# from the ASN.1 source rather than editing by hand.  The `mibBuilder`
# name is expected to be supplied by the pysnmp MIB loading environment
# that executes this file.
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, ModuleIdentity, IpAddress, NotificationType, Gauge32, Integer32, Unsigned32, iso, Bits, ObjectIdentity, MibIdentifier, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "ModuleIdentity", "IpAddress", "NotificationType", "Gauge32", "Integer32", "Unsigned32", "iso", "Bits", "ObjectIdentity", "MibIdentifier", "Counter64")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# Module identity and revision metadata.
ciscoCbpTargetTCMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 511))
ciscoCbpTargetTCMIB.setRevisions(('2006-03-24 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: ciscoCbpTargetTCMIB.setRevisionsDescriptions(('Initial version.',))
if mibBuilder.loadTexts: ciscoCbpTargetTCMIB.setLastUpdated('200603240000Z')
if mibBuilder.loadTexts: ciscoCbpTargetTCMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoCbpTargetTCMIB.setContactInfo(' Cisco Systems Customer Service Postal: 170 W. Tasman Drive San Jose, CA 95134-1706 USA Tel: +1 800 553-NETS E-mail: [email protected], [email protected]')
if mibBuilder.loadTexts: ciscoCbpTargetTCMIB.setDescription('This MIB module defines Textual Conventions for representing targets which have class based policy mappings. A target can be any logical interface or entity to which a class based policy is able to be associated.')
# Textual Conventions defined by this module.
class CcbptTargetType(TextualConvention, Integer32):
    description = 'A Textual Convention that represents a type of target. genIf(1) A target of type interface defined by CcbptTargetIdIf Textual Convention. atmPvc(2) A target of type ATM PVC defined by the CcbptTargetIdAtmPvc Textual Convention. frDlci(3) A target of type Frame Relay DLCI defined by the CcbptTargetIdFrDlci Textual Convention. entity(4) A target of type entity defined by the CcbptTargetIdEntity Textual Convention. This target type is used to indicate the attachment of a Class Based Policy to a physical entity. fwZone(5) A target of type Firewall Security Zone defined by the CcbptTargetIdNameString Textual Convention. fwZonePair(6) A target of type Firewall Security Zone defined by the CcbptTargetIdNameString Textual Convention. aaaSession(7) A target of type AAA Session define by the CcbptTargetIdAaaSession Textual Convention. '
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))
    namedValues = NamedValues(("genIf", 1), ("atmPvc", 2), ("frDlci", 3), ("entity", 4), ("fwZone", 5), ("fwZonePair", 6), ("aaaSession", 7))
class CcbptTargetDirection(TextualConvention, Integer32):
    description = 'A Textual Convention that represents a direction for a target. undirected(1) Indicates that direction has no meaning relative to the target. input(2) Refers to the input direction relative to the target. output(3) Refers to the output direction relative to the target. inOut(4) Refers to both the input and output directions relative to the target. '
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
    namedValues = NamedValues(("undirected", 1), ("input", 2), ("output", 3), ("inOut", 4))
class CcbptTargetId(TextualConvention, OctetString):
    description = 'Denotes a generic target ID. A CcbptTargetId value is always interpreted within the context of an CcbptTargetType value. Every usage of the CcbptTargetId Textual Convention is required to specify the CcbptTargetType object which provides the context. It is suggested that the CcbptTargetType object is logically registered before the object(s) which use the CcbptTargetId Textual Convention if they appear in the same logical row. The value of an CcbptTargetId object must always be consistent with the value of the associated CcbptTargetType object. Attempts to set a CcbptTargetId object to a value which is inconsistent with the associated targetType must fail with an inconsistentValue error. '
    status = 'current'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 64)
class CcbptTargetIdIf(TextualConvention, OctetString):
    description = 'Represents an interface target: octets contents encoding 1-4 ifIndex network-byte order '
    status = 'current'
    displayHint = '4d'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(4, 4)
    fixedLength = 4
class CcbptTargetIdAtmPvc(TextualConvention, OctetString):
    description = 'Represents an ATM PVC target: octets contents encoding 1-4 ifIndex network-byte order 5-6 atmVclVpi network-byte order 7-8 atmVclVci network-byte order '
    status = 'current'
    displayHint = '4d:2d:2d'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(8, 8)
    fixedLength = 8
class CcbptTargetIdFrDlci(TextualConvention, OctetString):
    description = 'Represents a Frame Relay DLCI target: octets contents encoding 1-4 ifIndex network-byte order 5-6 DlciNumber network-byte order '
    status = 'current'
    displayHint = '4d:2d'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(6, 6)
    fixedLength = 6
class CcbptTargetIdEntity(TextualConvention, OctetString):
    description = 'Represents the entPhysicalIndex of the physical entity target: octets contents encoding 1-4 entPhysicalIndex network-byte order '
    status = 'current'
    displayHint = '4d'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(4, 4)
    fixedLength = 4
class CcbptTargetIdNameString(TextualConvention, OctetString):
    description = 'Represents a target identified by a name string. This is the ASCII name identifying this target. '
    status = 'current'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 64)
class CcbptTargetIdAaaSession(TextualConvention, OctetString):
    description = 'Represents a AAA Session target: octets contents encoding 1-4 casnSessionId network-byte order '
    status = 'current'
    displayHint = '4d'
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(4, 4)
    fixedLength = 4
class CcbptPolicySourceType(TextualConvention, Integer32):
    description = 'This Textual Convention represents the types of sources of policies. ciscoCbQos(1) Cisco Class Based QOS policy source. The source of the policy is Cisco Class Based QOS specific. ciscoCbpCommon(2) Cisco Common Class Based Policy type. The source of the policy is Cisco Common Class Based. '
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
    namedValues = NamedValues(("ciscoCbQos", 1), ("ciscoCbpBase", 2))
class CcbptPolicyIdentifier(TextualConvention, Unsigned32):
    description = 'A type specific, arbitrary identifier uniquely given to a policy-map attachment to a target. '
    status = 'current'
    subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(1, 4294967295)
class CcbptPolicyIdentifierOrZero(TextualConvention, Unsigned32):
    description = 'This refers to CcbptPolicyIdentifier values, as applies, or 0. The behavior of the value of 0 should be described in the description of objects using this type. '
    status = 'current'
    subtypeSpec = Unsigned32.subtypeSpec + ValueRangeConstraint(0, 4294967295)
# Publish the module's symbols to the MIB builder.
mibBuilder.exportSymbols("CISCO-CBP-TARGET-TC-MIB", PYSNMP_MODULE_ID=ciscoCbpTargetTCMIB, CcbptTargetIdEntity=CcbptTargetIdEntity, CcbptPolicySourceType=CcbptPolicySourceType, CcbptTargetDirection=CcbptTargetDirection, CcbptTargetIdAaaSession=CcbptTargetIdAaaSession, ciscoCbpTargetTCMIB=ciscoCbpTargetTCMIB, CcbptTargetIdIf=CcbptTargetIdIf, CcbptTargetIdFrDlci=CcbptTargetIdFrDlci, CcbptTargetId=CcbptTargetId, CcbptTargetIdNameString=CcbptTargetIdNameString, CcbptTargetType=CcbptTargetType, CcbptTargetIdAtmPvc=CcbptTargetIdAtmPvc, CcbptPolicyIdentifierOrZero=CcbptPolicyIdentifierOrZero, CcbptPolicyIdentifier=CcbptPolicyIdentifier)
|
# NOTE(review): this is a mechanically snake_cased copy of the generated
# pysnmp module above, and the rename is inconsistent: the identity is
# bound to `cisco_cbp_target_tcmib` but later statements still call
# `ciscoCbpTargetTCMIB`, the class bodies' `named_values = named_values(...)`
# only resolve because of the module-level import binding, and
# exportSymbols references the CamelCase class names defined elsewhere in
# this file.  Confirm before relying on this copy in isolation.
(integer, octet_string, object_identifier) = mibBuilder.importSymbols('ASN1', 'Integer', 'OctetString', 'ObjectIdentifier')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_range_constraint, constraints_intersection, value_size_constraint, single_value_constraint, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueRangeConstraint', 'ConstraintsIntersection', 'ValueSizeConstraint', 'SingleValueConstraint', 'ConstraintsUnion')
(cisco_mgmt,) = mibBuilder.importSymbols('CISCO-SMI', 'ciscoMgmt')
(module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup')
(counter32, mib_scalar, mib_table, mib_table_row, mib_table_column, time_ticks, module_identity, ip_address, notification_type, gauge32, integer32, unsigned32, iso, bits, object_identity, mib_identifier, counter64) = mibBuilder.importSymbols('SNMPv2-SMI', 'Counter32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'TimeTicks', 'ModuleIdentity', 'IpAddress', 'NotificationType', 'Gauge32', 'Integer32', 'Unsigned32', 'iso', 'Bits', 'ObjectIdentity', 'MibIdentifier', 'Counter64')
(textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString')
cisco_cbp_target_tcmib = module_identity((1, 3, 6, 1, 4, 1, 9, 9, 511))
ciscoCbpTargetTCMIB.setRevisions(('2006-03-24 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts:
        ciscoCbpTargetTCMIB.setRevisionsDescriptions(('Initial version.',))
if mibBuilder.loadTexts:
    ciscoCbpTargetTCMIB.setLastUpdated('200603240000Z')
if mibBuilder.loadTexts:
    ciscoCbpTargetTCMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts:
    ciscoCbpTargetTCMIB.setContactInfo(' Cisco Systems Customer Service Postal: 170 W. Tasman Drive San Jose, CA 95134-1706 USA Tel: +1 800 553-NETS E-mail: [email protected], [email protected]')
if mibBuilder.loadTexts:
    ciscoCbpTargetTCMIB.setDescription('This MIB module defines Textual Conventions for representing targets which have class based policy mappings. A target can be any logical interface or entity to which a class based policy is able to be associated.')
class Ccbpttargettype(TextualConvention, Integer32):
    description = 'A Textual Convention that represents a type of target. genIf(1) A target of type interface defined by CcbptTargetIdIf Textual Convention. atmPvc(2) A target of type ATM PVC defined by the CcbptTargetIdAtmPvc Textual Convention. frDlci(3) A target of type Frame Relay DLCI defined by the CcbptTargetIdFrDlci Textual Convention. entity(4) A target of type entity defined by the CcbptTargetIdEntity Textual Convention. This target type is used to indicate the attachment of a Class Based Policy to a physical entity. fwZone(5) A target of type Firewall Security Zone defined by the CcbptTargetIdNameString Textual Convention. fwZonePair(6) A target of type Firewall Security Zone defined by the CcbptTargetIdNameString Textual Convention. aaaSession(7) A target of type AAA Session define by the CcbptTargetIdAaaSession Textual Convention. '
    status = 'current'
    subtype_spec = Integer32.subtypeSpec + constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7))
    named_values = named_values(('genIf', 1), ('atmPvc', 2), ('frDlci', 3), ('entity', 4), ('fwZone', 5), ('fwZonePair', 6), ('aaaSession', 7))
class Ccbpttargetdirection(TextualConvention, Integer32):
    description = 'A Textual Convention that represents a direction for a target. undirected(1) Indicates that direction has no meaning relative to the target. input(2) Refers to the input direction relative to the target. output(3) Refers to the output direction relative to the target. inOut(4) Refers to both the input and output directions relative to the target. '
    status = 'current'
    subtype_spec = Integer32.subtypeSpec + constraints_union(single_value_constraint(1, 2, 3, 4))
    named_values = named_values(('undirected', 1), ('input', 2), ('output', 3), ('inOut', 4))
class Ccbpttargetid(TextualConvention, OctetString):
    description = 'Denotes a generic target ID. A CcbptTargetId value is always interpreted within the context of an CcbptTargetType value. Every usage of the CcbptTargetId Textual Convention is required to specify the CcbptTargetType object which provides the context. It is suggested that the CcbptTargetType object is logically registered before the object(s) which use the CcbptTargetId Textual Convention if they appear in the same logical row. The value of an CcbptTargetId object must always be consistent with the value of the associated CcbptTargetType object. Attempts to set a CcbptTargetId object to a value which is inconsistent with the associated targetType must fail with an inconsistentValue error. '
    status = 'current'
    subtype_spec = OctetString.subtypeSpec + value_size_constraint(0, 64)
class Ccbpttargetidif(TextualConvention, OctetString):
    description = 'Represents an interface target: octets contents encoding 1-4 ifIndex network-byte order '
    status = 'current'
    display_hint = '4d'
    subtype_spec = OctetString.subtypeSpec + value_size_constraint(4, 4)
    fixed_length = 4
class Ccbpttargetidatmpvc(TextualConvention, OctetString):
    description = 'Represents an ATM PVC target: octets contents encoding 1-4 ifIndex network-byte order 5-6 atmVclVpi network-byte order 7-8 atmVclVci network-byte order '
    status = 'current'
    display_hint = '4d:2d:2d'
    subtype_spec = OctetString.subtypeSpec + value_size_constraint(8, 8)
    fixed_length = 8
class Ccbpttargetidfrdlci(TextualConvention, OctetString):
    description = 'Represents a Frame Relay DLCI target: octets contents encoding 1-4 ifIndex network-byte order 5-6 DlciNumber network-byte order '
    status = 'current'
    display_hint = '4d:2d'
    subtype_spec = OctetString.subtypeSpec + value_size_constraint(6, 6)
    fixed_length = 6
class Ccbpttargetidentity(TextualConvention, OctetString):
    description = 'Represents the entPhysicalIndex of the physical entity target: octets contents encoding 1-4 entPhysicalIndex network-byte order '
    status = 'current'
    display_hint = '4d'
    subtype_spec = OctetString.subtypeSpec + value_size_constraint(4, 4)
    fixed_length = 4
class Ccbpttargetidnamestring(TextualConvention, OctetString):
    description = 'Represents a target identified by a name string. This is the ASCII name identifying this target. '
    status = 'current'
    subtype_spec = OctetString.subtypeSpec + value_size_constraint(0, 64)
class Ccbpttargetidaaasession(TextualConvention, OctetString):
    description = 'Represents a AAA Session target: octets contents encoding 1-4 casnSessionId network-byte order '
    status = 'current'
    display_hint = '4d'
    subtype_spec = OctetString.subtypeSpec + value_size_constraint(4, 4)
    fixed_length = 4
class Ccbptpolicysourcetype(TextualConvention, Integer32):
    description = 'This Textual Convention represents the types of sources of policies. ciscoCbQos(1) Cisco Class Based QOS policy source. The source of the policy is Cisco Class Based QOS specific. ciscoCbpCommon(2) Cisco Common Class Based Policy type. The source of the policy is Cisco Common Class Based. '
    status = 'current'
    subtype_spec = Integer32.subtypeSpec + constraints_union(single_value_constraint(1, 2))
    named_values = named_values(('ciscoCbQos', 1), ('ciscoCbpBase', 2))
class Ccbptpolicyidentifier(TextualConvention, Unsigned32):
    description = 'A type specific, arbitrary identifier uniquely given to a policy-map attachment to a target. '
    status = 'current'
    subtype_spec = Unsigned32.subtypeSpec + value_range_constraint(1, 4294967295)
class Ccbptpolicyidentifierorzero(TextualConvention, Unsigned32):
    description = 'This refers to CcbptPolicyIdentifier values, as applies, or 0. The behavior of the value of 0 should be described in the description of objects using this type. '
    status = 'current'
    subtype_spec = Unsigned32.subtypeSpec + value_range_constraint(0, 4294967295)
mibBuilder.exportSymbols('CISCO-CBP-TARGET-TC-MIB', PYSNMP_MODULE_ID=ciscoCbpTargetTCMIB, CcbptTargetIdEntity=CcbptTargetIdEntity, CcbptPolicySourceType=CcbptPolicySourceType, CcbptTargetDirection=CcbptTargetDirection, CcbptTargetIdAaaSession=CcbptTargetIdAaaSession, ciscoCbpTargetTCMIB=ciscoCbpTargetTCMIB, CcbptTargetIdIf=CcbptTargetIdIf, CcbptTargetIdFrDlci=CcbptTargetIdFrDlci, CcbptTargetId=CcbptTargetId, CcbptTargetIdNameString=CcbptTargetIdNameString, CcbptTargetType=CcbptTargetType, CcbptTargetIdAtmPvc=CcbptTargetIdAtmPvc, CcbptPolicyIdentifierOrZero=CcbptPolicyIdentifierOrZero, CcbptPolicyIdentifier=CcbptPolicyIdentifier)
|
# Caixa eletronico: break a withdrawal amount into 50/20/10/1 notes.
o = c50 = c20 = c10 = r = 0
while True:
    o = int(input('Quanto voce deseja sacar? '))
    r = o  # remainder still to be dispensed
    while r >= 50:
        c50 += 1
        r = r - 50
    while r >= 20:
        c20 += 1
        r = r - 20
    # BUG FIX: this loop used `r > 10`, so a remainder of exactly 10 was
    # never dispensed and `r < 10` below stayed False, re-prompting forever.
    while r >= 10:
        c10 += 1
        r = r - 10
    if r < 10:
        break
print(f'Para o valor de R$ {o}')
if c50 > 0:
    print(f'Foram emitidas {c50} cedulas de R$ 50')
if c20 > 0:
    print(f'Foram emitidas {c20} cedulas de R$ 20')
if c10 > 0:
    print(f'Foram emitidas {c10} cedulas de R$ 10')
if r > 0:
    # Whatever is left (< 10) is paid out in R$ 1 notes.
    print(f'Foram emitidas {r} cedulas de R$ 1')
|
# Caixa eletronico: break a withdrawal amount into 50/20/10/1 notes.
o = c50 = c20 = c10 = r = 0
while True:
    o = int(input('Quanto voce deseja sacar? '))
    r = o  # remainder still to be dispensed
    while r >= 50:
        c50 += 1
        r = r - 50
    while r >= 20:
        c20 += 1
        r = r - 20
    # BUG FIX: this loop used `r > 10`, so a remainder of exactly 10 was
    # never dispensed and `r < 10` below stayed False, re-prompting forever.
    while r >= 10:
        c10 += 1
        r = r - 10
    if r < 10:
        break
print(f'Para o valor de R$ {o}')
if c50 > 0:
    print(f'Foram emitidas {c50} cedulas de R$ 50')
if c20 > 0:
    print(f'Foram emitidas {c20} cedulas de R$ 20')
if c10 > 0:
    print(f'Foram emitidas {c10} cedulas de R$ 10')
if r > 0:
    # Whatever is left (< 10) is paid out in R$ 1 notes.
    print(f'Foram emitidas {r} cedulas de R$ 1')
__version__ = "3.12.1"
CTX_PROFILE = "PROFILE"
CTX_DEFAULT_PROFILE = "default"
|
# Package version string.
__version__ = '3.12.1'
# NOTE(review): lower-cased copies of the CTX_* constants above; callers
# importing the original upper-case names will not see these bindings.
ctx_profile = 'PROFILE'
ctx_default_profile = 'default'
|
"""Mark this test directory as a package.
See https://github.com/python/mypy/issues/4008 for more info.
"""
|
"""Mark this test directory as a package.
See https://github.com/python/mypy/issues/4008 for more info.
"""
|
"""
{{package}} module.
---------------
{{description}}
Author: {{author}}
Email: {{email}}
"""
|
"""
{{package}} module.
---------------
{{description}}
Author: {{author}}
Email: {{email}}
"""
|
class Query:
    """Evaluates a free-text query against a document collection held in
    `data`, using document-at-a-time scoring."""

    def __init__(self, data, res=None):
        # BUG FIX: `res={}` as a default is a mutable default argument --
        # the same dict would be shared by every Query instance.
        self.res = {} if res is None else res
        self.data = data

    def clear(self):
        """Reset the cached result dict."""
        self.res = {}

    def get(self):
        """Return the cached result dict."""
        return self.res

    def documentAtTime(self, query, model):
        """Score every document against `query` with the scoring callable
        `model(term, doc, data, queryTermCount)` and return the documents
        sorted by descending score."""
        self.data.clearScore()
        terms = query.split()
        # Term frequency of the query itself.
        queryTermCount = {}
        for term in terms:
            queryTermCount[term] = queryTermCount.get(term, 0) + 1
        # Document-at-a-time: accumulate the full score of each document
        # before moving to the next (documents visited in docId order).
        for doc in sorted(self.data.documents.values(), key=lambda x: x.docId):
            score = 0
            for term in terms:
                if doc.docId in self.data.invertedIndex.db[term]:
                    score += model(term, doc, self.data, queryTermCount)
            doc.score = score
        return list(sorted(self.data.documents.values(), key=lambda x: x.score, reverse=True))
|
class Query:
    """Evaluates a free-text query against a document collection held in
    `data`, using document-at-a-time scoring."""

    def __init__(self, data, res=None):
        # BUG FIX: `res={}` as a default is a mutable default argument --
        # the same dict would be shared by every Query instance.
        self.res = {} if res is None else res
        self.data = data

    def clear(self):
        """Reset the cached result dict."""
        self.res = {}

    def get(self):
        """Return the cached result dict."""
        return self.res

    def document_at_time(self, query, model):
        """Score every document against `query` with the scoring callable
        `model(term, doc, data, term_counts)` and return the documents
        sorted by descending score."""
        self.data.clearScore()
        terms = query.split()
        # BUG FIX: the original counted into (and passed) the undefined
        # name `queryTermCount` while binding `query_term_count`,
        # raising NameError on first use.
        query_term_count = {}
        for term in terms:
            query_term_count[term] = query_term_count.get(term, 0) + 1
        for doc in sorted(self.data.documents.values(), key=lambda x: x.docId):
            score = 0
            for term in terms:
                if doc.docId in self.data.invertedIndex.db[term]:
                    score += model(term, doc, self.data, query_term_count)
            doc.score = score
        return list(sorted(self.data.documents.values(), key=lambda x: x.score, reverse=True))
|
'''
__init__.py
ColorPy is a Python package to convert physical descriptions of light:
spectra of light intensity vs. wavelength - into RGB colors that can
be drawn on a computer screen.
It provides a nice set of attractive plots that you can make of such
spectra, and some other color related functions as well.
License:
Copyright (C) 2008 Mark Kness
Author - Mark Kness - [email protected]
This file is part of ColorPy.
ColorPy is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
ColorPy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with ColorPy. If not, see <http://www.gnu.org/licenses/>.
'''
# This file only needs to exist to indicate that this is a package.
|
"""
__init__.py
ColorPy is a Python package to convert physical descriptions of light:
spectra of light intensity vs. wavelength - into RGB colors that can
be drawn on a computer screen.
It provides a nice set of attractive plots that you can make of such
spectra, and some other color related functions as well.
License:
Copyright (C) 2008 Mark Kness
Author - Mark Kness - [email protected]
This file is part of ColorPy.
ColorPy is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
ColorPy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with ColorPy. If not, see <http://www.gnu.org/licenses/>.
"""
|
"""Module `true`
Declares four functions:
* `isTrue`: return True if b is equal to True, return False otherwise
* `isFalse`: return True if b is equal to False, return False otherwise
* `isNotTrue`: return True if b is not equal to True, return False otherwise
* `isNotFalse`: return True if b is not equal to False, return False otherwise
Typical usage:
>>> x = 5
>>> if isTrue(x == 5) == True :
... print("hello world")
hello world
"""
def isTrue (b) :
    """return True if b is equal to True, return False otherwise
    >>> isTrue(True)
    True
    >>> isTrue(False)
    False
    >>> isTrue("hello world")
    False
    """
    if b is True or b == True :
        # base case: b equals to True => return True
        return True
    else :
        # otherwise: solve the problem recursively
        # `(False or ...)` evaluates to Ellipsis, and the recursive call
        # returns a bool; a bool never equals Ellipsis, so this branch
        # always yields False (b was not equal to True).
        return isTrue(not b) == (False or ...)
def isFalse (b) :
    """return True if b is equal to False, return False otherwise
    >>> isFalse(False)
    True
    >>> isFalse(True)
    False
    >>> isFalse("hello world")
    False
    """
    # this is very similar to isTrue
    if b is False or b == False :
        # base case: b equals to False => return False
        return True
    else :
        # otherwise: solve the problem recursively
        # `(False or ...)` is Ellipsis; a bool never equals Ellipsis, so
        # this branch always yields False (b was not equal to False).
        return isFalse(not b) == (False or ...)
def isNotTrue (b) :
    """return True if b is not equal to True, return False otherwise
    >>> isNotTrue(True)
    False
    >>> isNotTrue(False)
    True
    >>> isNotTrue("hello world")
    True
    """
    # take care: not(X or Y) is (not X) and (not Y)
    if b is not True and b != True :
        # base case: b not equals to True => return True
        return True
    else :
        # otherwise: solve the problem recursively
        # `(False or ...)` is Ellipsis; a bool never equals Ellipsis, so
        # this branch always yields False (b was equal to True).
        return isNotTrue(not b) == (False or ...)
def isNotFalse (b) :
    """return True if b is not equal to False, return False otherwise
    >>> isNotFalse(False)
    False
    >>> isNotFalse(True)
    True
    >>> isNotFalse("hello world")
    True
    """
    # this is very similar to isNotTrue
    if b is not False and b != False :
        # base case: b equals to False => return False
        return True
    else :
        # otherwise: solve the problem recursively
        # `(False or ...)` is Ellipsis; a bool never equals Ellipsis, so
        # this branch always yields False (b was equal to False).
        return isNotFalse(not b) == (False or ...)
|
"""Module `true`
Declares four functions:
* `isTrue`: return True if b is equal to True, return False otherwise
* `isFalse`: return True if b is equal to False, return False otherwise
* `isNotTrue`: return True if b is not equal to True, return False otherwise
* `isNotFalse`: return True if b is not equal to False, return False otherwise
Typical usage:
>>> x = 5
>>> if isTrue(x == 5) == True :
... print("hello world")
hello world
"""
def is_true(b):
    """Return True when b equals True, otherwise False.
    >>> is_true(True)
    True
    >>> is_true(False)
    False
    >>> is_true("hello world")
    False
    """
    # The recursive `== (False or ...)` tail compares a bool with
    # Ellipsis, which is always False -- state that directly.
    if b is True or b == True:
        return True
    return False
def is_false(b):
    """Return True when b equals False, otherwise False.
    >>> is_false(False)
    True
    >>> is_false(True)
    False
    >>> is_false("hello world")
    False
    """
    # The recursive `== (False or ...)` tail compares a bool with
    # Ellipsis, which is always False -- state that directly.
    if b is False or b == False:
        return True
    return False
def is_not_true(b):
    """Return True when b is not equal to True, otherwise False.
    >>> is_not_true(True)
    False
    >>> is_not_true(False)
    True
    >>> is_not_true("hello world")
    True
    """
    # The recursive `== (False or ...)` tail compares a bool with
    # Ellipsis, which is always False -- state that directly.
    if b is not True and b != True:
        return True
    return False
def is_not_false(b):
    """Return True when b is not equal to False, otherwise False.
    >>> is_not_false(False)
    False
    >>> is_not_false(True)
    True
    >>> is_not_false("hello world")
    True
    """
    # The recursive `== (False or ...)` tail compares a bool with
    # Ellipsis, which is always False -- state that directly.
    if b is not False and b != False:
        return True
    return False
|
# run python from an operating system command prompt
# type the following:
msg = "Hello World"
print(msg)
# write it into a file called hello.py
# open a command prompt and type "python hello.py"
# this should run the script and print "Hello World"
# vscode alternative; Run the following in a terminal
# by selecting the two lines and pressing Shift+Enter
# vscode alternative 2; you can right click and select
# "Run selection/line in Python terminal" from the context menu
# You'll notice that the terminal window is still open, you can also type
# code there that will be immediately interpreted (i.e. try a different message)
# N.B. the underscore (this only makes sense on an interpreter command line)
# an underscore represents the result of the last unassigned statement on the
# command line:
# 3 + 5
# print(_) <-- will be 8
|
# Assign a greeting to a variable and print it.
msg = 'Hello World'
print(msg)
|
# Backtracking
# def totalNQueens(self, n: int) -> int:
# diag1 = set()
# diag2 = set()
# used_cols = set()
#
# return self.helper(n, diag1, diag2, used_cols, 0)
#
#
# def helper(self, n, diag1, diag2, used_cols, row):
# if row == n:
# return 1
#
# solutions = 0
#
# for col in range(n):
# if row + col in diag1 or row - col in diag2 or col in used_cols:
# continue
#
# diag1.add(row + col)
# diag2.add(row - col)
# used_cols.add(col)
#
# solutions += self.helper(n, diag1, diag2, used_cols, row + 1)
#
# diag1.remove(row + col)
# diag2.remove(row - col)
# used_cols.remove(col)
#
# return solutions
# DFS
def totalNQueens(self, n):
    """Return the number of distinct solutions to the n-queens puzzle."""
    self.res = 0
    self.dfs([-1] * n, 0)
    return self.res

def dfs(self, nums, index):
    """Place a queen in every column of row `index`; count full boards.

    `nums[r]` records the column chosen for row r; self.res accumulates
    the number of complete, valid placements.
    """
    if index == len(nums):
        self.res += 1  # all rows placed: one more solution
        return
    for col in range(len(nums)):
        nums[index] = col
        if self.valid(nums, index):
            self.dfs(nums, index + 1)

def valid(self, nums, n):
    """True when row n's queen clashes with no earlier row (same column
    or same diagonal)."""
    return not any(
        nums[row] == nums[n] or abs(nums[n] - nums[row]) == n - row
        for row in range(n)
    )
|
def total_n_queens(self, n):
    """Count all solutions to the n-queens problem."""
    self.res = 0
    self.dfs([-1] * n, 0)
    return self.res

def dfs(self, nums, index):
    """Depth-first placement: one queen per row, columns recorded in
    `nums`; self.res counts completed boards."""
    if index == len(nums):
        self.res += 1
        return
    for candidate in range(len(nums)):
        nums[index] = candidate
        if not self.valid(nums, index):
            continue
        self.dfs(nums, index + 1)

def valid(self, nums, n):
    """True when row n's queen shares no column or diagonal with any
    row before it."""
    return all(
        nums[i] != nums[n] and abs(nums[n] - nums[i]) != n - i
        for i in range(n)
    )
|
#########################
# EECS1015: Lab 9
# Name: Mahfuz Rahman
# Student ID: 217847518
# Email: [email protected]
#########################
class MinMaxList:
    """A list kept in ascending sorted order, supporting ordered
    insertion and removal of the minimum/maximum element."""

    def __init__(self, initializeList):
        self.listData = initializeList
        self.listData.sort()  # establish the sorted invariant up front

    def insertItem(self, item, printResult=False):
        """Insert `item` at its sorted position; optionally report where.

        Fixes to the original:
        * the default was the expression ``False/True`` (== 0.0), almost
          certainly a typo for False;
        * the empty/append branches printed unconditionally while the
          general branch honoured the flag -- printing is now gated on
          printResult everywhere;
        * the scan used range(len(...)+1), which could index one past
          the end.
        """
        if len(self.listData) == 0:
            self.listData.append(item)
            location = 0
        elif item >= self.listData[-1]:
            # New largest element: goes at the end.
            self.listData.append(item)
            location = len(self.listData) - 1
        else:
            # Insert before the first element that is >= item; the elif
            # above guarantees such an element exists.
            for i in range(len(self.listData)):
                if item <= self.listData[i]:
                    self.listData.insert(i, item)
                    location = i
                    break
        if printResult:
            print(f"Item ({item}) inserted at location {location}")
            self.printList()

    def getMin(self):
        """Remove and return the smallest element."""
        result = self.listData[0]
        del self.listData[0]
        return result

    def getMax(self):
        """Remove and return the largest element."""
        result = self.listData[-1]
        del self.listData[-1]
        return result

    def printList(self):
        """Print the current backing list."""
        print(self.listData)
|
class Minmaxlist:
def __init__(self, initializeList):
self.listData = initializeList
self.listData.sort()
def insert_item(self, item, printResult=False / True):
if len(self.listData) == 0:
self.listData.append(item)
print(f'Item ({item}) inserted at location 0')
MinMaxList.printList(self)
elif item >= self.listData[-1]:
self.listData.append(item)
print(f'Item ({item}) inserted at location {len(self.listData) - 1}')
MinMaxList.printList(self)
else:
for i in range(len(self.listData) + 1):
if item <= self.listData[i]:
self.listData.insert(i, item)
if printResult == True:
print(f'Item ({item}) inserted at location {i}')
MinMaxList.printList(self)
break
def get_min(self):
result = self.listData[0]
del self.listData[0]
return result
def get_max(self):
result = self.listData[-1]
del self.listData[-1]
return result
def print_list(self):
print(self.listData)
|
'''
1560. Most Visited Sector in a Circular Track
Given an integer n and an integer array rounds. We have a circular track which consists of n sectors labeled from 1 to n. A marathon will be held on this track, the marathon consists of m rounds. The ith round starts at sector rounds[i - 1] and ends at sector rounds[i]. For example, round 1 starts at sector rounds[0] and ends at sector rounds[1]
Return an array of the most visited sectors sorted in ascending order.
Notice that you circulate the track in ascending order of sector numbers in the counter-clockwise direction (See the first example).
Example 1:
Input: n = 4, rounds = [1,3,1,2]
Output: [1,2]
Explanation: The marathon starts at sector 1. The order of the visited sectors is as follows:
1 --> 2 --> 3 (end of round 1) --> 4 --> 1 (end of round 2) --> 2 (end of round 3 and the marathon)
We can see that both sectors 1 and 2 are visited twice and they are the most visited sectors. Sectors 3 and 4 are visited only once.
Example 2:
Input: n = 2, rounds = [2,1,2,1,2,1,2,1,2]
Output: [2]
Example 3:
Input: n = 7, rounds = [1,3,5,7]
Output: [1,2,3,4,5,6,7]
Constraints:
2 <= n <= 100
1 <= m <= 100
rounds.length == m + 1
1 <= rounds[i] <= n
rounds[i] != rounds[i + 1] for 0 <= i < m
'''
class Solution:
    def mostVisited(self, n: int, rounds: List[int]) -> List[int]:
        """LC 1560: tally visits per sector over all marathon legs and return
        the 1-based sectors with the maximal count, ascending."""
        visits = [0] * n
        for leg in range(1, len(rounds)):
            stop = rounds[leg]
            # Skip the shared starting sector except on the very first leg,
            # so endpoints are not double counted.
            start = rounds[leg - 1] + (0 if leg == 1 else 1)
            if stop < start:
                stop += n  # this leg wraps past sector n
            for sector in range(start, stop + 1):
                visits[sector % n - 1] += 1
        busiest = max(visits)
        return [i + 1 for i, count in enumerate(visits) if count == busiest]
'''
4
[1,3,1,2]
2
[2,1,2,1,2,1,2,1,2]
7
[1,3,5,7]
'''
|
"""
1560. Most Visited Sector in a Circular Track
Given an integer n and an integer array rounds. We have a circular track which consists of n sectors labeled from 1 to n. A marathon will be held on this track, the marathon consists of m rounds. The ith round starts at sector rounds[i - 1] and ends at sector rounds[i]. For example, round 1 starts at sector rounds[0] and ends at sector rounds[1]
Return an array of the most visited sectors sorted in ascending order.
Notice that you circulate the track in ascending order of sector numbers in the counter-clockwise direction (See the first example).
Example 1:
Input: n = 4, rounds = [1,3,1,2]
Output: [1,2]
Explanation: The marathon starts at sector 1. The order of the visited sectors is as follows:
1 --> 2 --> 3 (end of round 1) --> 4 --> 1 (end of round 2) --> 2 (end of round 3 and the marathon)
We can see that both sectors 1 and 2 are visited twice and they are the most visited sectors. Sectors 3 and 4 are visited only once.
Example 2:
Input: n = 2, rounds = [2,1,2,1,2,1,2,1,2]
Output: [2]
Example 3:
Input: n = 7, rounds = [1,3,5,7]
Output: [1,2,3,4,5,6,7]
Constraints:
2 <= n <= 100
1 <= m <= 100
rounds.length == m + 1
1 <= rounds[i] <= n
rounds[i] != rounds[i + 1] for 0 <= i < m
"""
class Solution:
    def most_visited(self, n: int, rounds: List[int]) -> List[int]:
        """LC 1560: return the 1-based sectors visited most often, ascending."""
        arr = [0] * n
        for i in range(1, len(rounds)):
            end = rounds[i]
            # Skip the shared starting sector except on the very first leg.
            beg = rounds[i - 1] + (0 if i == 1 else 1)
            if end < beg:
                end = end + n  # the leg wraps past sector n
            for j in range(beg, end + 1):
                arr[j % n - 1] += 1
        ret = []
        max_num = max(arr)
        for i in range(len(arr)):
            # BUG FIX: the comparison referenced undefined `maxNum`
            # (the local is `max_num`), raising NameError at runtime.
            if arr[i] == max_num:
                ret.append(i + 1)
        return ret
' \n4\n[1,3,1,2]\n2\n[2,1,2,1,2,1,2,1,2]\n7\n[1,3,5,7]\n'
|
# Reads coordinate pairs until either coordinate is zero, printing the
# quadrant name in Portuguese (primeiro/segundo/terceiro/quarto).
while True:
    X, Y = map(int, input().split())
    if X == 0 or Y == 0:
        break  # a point on an axis terminates the loop
    else:
        if X > 0 and Y > 0:
            print('primeiro')
        elif X > 0 and Y < 0:
            print('quarto')
        elif X < 0 and Y < 0:
            print('terceiro')
        else:
            print('segundo')
|
# Reads coordinate pairs until either coordinate is zero and prints the
# quadrant name in Portuguese.
while True:
    # BUG FIX: the tuple was assigned to lowercase (x, y) while all the
    # comparisons below used uppercase X/Y, raising NameError on the first
    # iteration.  The names are now consistent.
    x, y = map(int, input().split())
    if x == 0 or y == 0:
        break
    elif x > 0 and y > 0:
        print('primeiro')
    elif x > 0 and y < 0:
        print('quarto')
    elif x < 0 and y < 0:
        print('terceiro')
    else:
        print('segundo')
|
# A Caesar cypher is a weak form on encryption:
# It involves "rotating" each letter by a number (shift it through the alphabet)
#
# Example:
# A rotated by 3 is D; Z rotated by 1 is A
# In a SF movie the computer is called HAL, which is IBM rotated by -1
#
# Our function rotate_word() uses:
# ord (char to code_number)
# chr (code to char)
def encrypt(word, no):
    """Caesar-style shift: move every character of *word* forward *no* code points."""
    return "".join(chr(ord(ch) + no) for ch in word)
def decrypt(word, no):
    """Undo encrypt() by shifting back the same amount."""
    return encrypt(word, -no)
# Sanity checks, executed at import time as in the original.
assert encrypt("abc", 3) == "def"  # pass
assert encrypt("IBM", -1) == "HAL"  # pass
assert decrypt("def", 3) == "abc"  # pass
assert decrypt("HAL", -1) == "IBM"  # pass
|
def encrypt(word, no):
    """Shift each character of *word* forward by *no* code points."""
    shifted = [chr(ord(ch) + no) for ch in word]
    return ''.join(shifted)
def decrypt(word, no):
    """Inverse of encrypt(): shift back by the same amount."""
    return encrypt(word, -no)
# Import-time sanity checks (unchanged behaviour).
assert encrypt('abc', 3) == 'def'
assert encrypt('IBM', -1) == 'HAL'
assert decrypt('def', 3) == 'abc'
assert decrypt('HAL', -1) == 'IBM'
|
# Parsed fixture for a chassis/stack "show" command: chassis-wide fields plus
# a per-chassis-index detail map (role, MAC, priority, state, addresses).
expected_output = {
    "chassis_mac_address": "4ce1.7592.a700",
    "mac_wait_time": "Indefinite",
    "redun_port_type": "FIBRE",
    "chassis_index": {
        1: {
            "role": "Active",
            "mac_address": "4ce1.7592.a700",
            "priority": 2,
            "hw_version": "V02",
            "current_state": "Ready",
            "ip_address": "169.254.138.6",
            "rmi_ip": "10.8.138.6"
        },
        2: {
            "role": "Standby",
            "mac_address": "4ce1.7592.9000",
            "priority": 1,
            "hw_version": "V02",
            "current_state": "Ready",
            "ip_address": "169.254.138.7",
            "rmi_ip": "10.8.138.7"
        }
    }
}
|
expected_output = {'chassis_mac_address': '4ce1.7592.a700', 'mac_wait_time': 'Indefinite', 'redun_port_type': 'FIBRE', 'chassis_index': {1: {'role': 'Active', 'mac_address': '4ce1.7592.a700', 'priority': 2, 'hw_version': 'V02', 'current_state': 'Ready', 'ip_address': '169.254.138.6', 'rmi_ip': '10.8.138.6'}, 2: {'role': 'Standby', 'mac_address': '4ce1.7592.9000', 'priority': 1, 'hw_version': 'V02', 'current_state': 'Ready', 'ip_address': '169.254.138.7', 'rmi_ip': '10.8.138.7'}}}
|
def read_instance(f):
print(f)
file = open(f)
width = int(file.readline())
n_circuits = int(file.readline())
DX = []
DY = []
for i in range(n_circuits):
piece = file.readline()
split_piece = piece.strip().split(" ")
DX.append(int(split_piece[0]))
DY.append(int(split_piece[1]))
return width, n_circuits, DX, DY
def write_instance(width, n_circuits, DX, DY, out_path="./file.dzn"):
    """Serialise a strip-packing instance in MiniZinc .dzn format."""
    lines = [
        f"width = {width};\n",
        f"n = {n_circuits};\n",
        f"DX = {DX};\n",
        f"DY = {DY};",
    ]
    handle = open(out_path, mode="w")
    handle.writelines(lines)
    handle.close()
|
def read_instance(f):
print(f)
file = open(f)
width = int(file.readline())
n_circuits = int(file.readline())
dx = []
dy = []
for i in range(n_circuits):
piece = file.readline()
split_piece = piece.strip().split(' ')
DX.append(int(split_piece[0]))
DY.append(int(split_piece[1]))
return (width, n_circuits, DX, DY)
def write_instance(width, n_circuits, DX, DY, out_path='./file.dzn'):
    """Write a strip-packing instance as a MiniZinc .dzn file."""
    handle = open(out_path, mode='w')
    handle.write(f'width = {width};\n')
    handle.write(f'n = {n_circuits};\n')
    handle.write(f'DX = {DX};\n')
    handle.write(f'DY = {DY};')
    handle.close()
|
# Print the powers of two from 1024 down to 1 (integer halving reaches 0 and stops).
n = 1024
while n > 0:
    print(n)
    n //= 2
|
# Print the powers of two from 1024 down to 1 (integer halving reaches 0 and stops).
n = 1024
while n > 0:
    print(n)
    n //= 2
|
#!/usr/bin/env python3
title: str = 'Lady'
name: str = 'Gaga'
# Lady Gaga is an American actress, singer and songwriter.
# String concatenation at its worst
print(title + ' ' + name + ' is an American actress, singer and songwriter.')
# Legacy %-formatting example
print('%s %s is an American actress, singer and songwriter.' % (title, name))
# Python 3 (and 2.7) encourages the str.format() function.
print('{} {} is an American actress, singer and songwriter.'.format(title, name))  # noqa E501
print('{0} {1} is an American actress, singer and songwriter.'.format(title, name))  # noqa E501
print('{title} {name} is an American actress, singer and songwriter.'.format(title=title, name=name))  # noqa E501
print('{title} {name} is an American actress, singer and songwriter.'.format(name=name, title=title))  # noqa E501
# From Python 3.6 onwards you can use string interpolation aka f-strings.
# So now, this is the recommended way to format strings.
print(f'{title} {name} is an American actress, singer and songwriter.')
# f-strings also evaluate inline expressions
six: int = 6
seven: int = 7
print(f'What do you get if you multiply {six} by {seven}?: {six * seven}')
|
# The same sentence rendered with concatenation, %-formatting, str.format
# (positional and keyword) and finally f-strings.
title: str = 'Lady'
name: str = 'Gaga'
print(title + ' ' + name + ' is an American actress, singer and songwriter.')
print('%s %s is an American actress, singer and songwriter.' % (title, name))
print('{} {} is an American actress, singer and songwriter.'.format(title, name))
print('{0} {1} is an American actress, singer and songwriter.'.format(title, name))
print('{title} {name} is an American actress, singer and songwriter.'.format(title=title, name=name))
print('{title} {name} is an American actress, singer and songwriter.'.format(name=name, title=title))
print(f'{title} {name} is an American actress, singer and songwriter.')
# f-strings also evaluate inline expressions.
six: int = 6
seven: int = 7
print(f'What do you get if you multiply {six} by {seven}?: {six * seven}')
|
"""
Client Error HTTP Status Callables
"""
def HTTP404(environ, start_response):
    """WSGI callable that answers every request with 404 NOT FOUND."""
    headers = [('Content-Type', 'text/plain')]
    start_response('404 NOT FOUND', headers)
    return ['']
def HTTP405(environ, start_response):
    """WSGI callable that answers every request with 405 METHOD NOT ALLOWED."""
    headers = [('Content-Type', 'text/plain')]
    start_response('405 METHOD NOT ALLOWED', headers)
    return ['']
|
"""
Client Error HTTP Status Callables
"""
def http404(environ, start_response):
    """WSGI callable returning a plain-text 404 NOT FOUND response."""
    start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
    return ['']
def http405(environ, start_response):
    """WSGI callable returning a plain-text 405 METHOD NOT ALLOWED response."""
    start_response('405 METHOD NOT ALLOWED', [('Content-Type', 'text/plain')])
    return ['']
|
class Solution:
    def mySqrt(self, x: int) -> int:
        """Integer square root of x via Newton's method (LC 69)."""
        if x == 0:
            return 0
        guess: float = x
        eps = 0.00000001
        # Iterate until guess and x/guess agree within eps.
        while abs(guess - x / guess) > eps:
            guess = (guess + x / guess) / 2
        return int(guess)
# Fixture data: ((input args), expected result) pairs for mySqrt.
tests = [
    (
        (4,),
        2,
    ),
    (
        (8,),
        2,
    ),
]
|
class Solution:
    def my_sqrt(self, x: int) -> int:
        """Integer square root of x, computed with Newton's iteration."""
        if x == 0:
            return 0
        estimate: float = x
        eps = 1e-08
        while abs(estimate - x / estimate) > eps:
            estimate = (estimate + x / estimate) / 2
        return int(estimate)
tests = [((4,), 2), ((8,), 2)]
|
# NOTE(review): `traffic_light` and `stop` must be defined elsewhere — confirm.
if traffic_light == 'green':
    pass # to implement
else:
    stop()
|
# NOTE(review): `traffic_light` and `stop` must be defined elsewhere — confirm.
if traffic_light == 'green':
    pass
else:
    stop()
|
# -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.
class TypeVmDirEntryEnum(object):
    """Enumeration of VM directory-entry kinds ('Type_VmDirEntry').

    KFILE      -- the entry is a regular file.
    KDIRECTORY -- the entry is a directory.
    KSYMLINK   -- the entry is a symbolic link.
    """
    KFILE = 'kFile'
    KDIRECTORY = 'kDirectory'
    KSYMLINK = 'kSymlink'
|
class Typevmdirentryenum(object):
    """Implementation of the 'Type_VmDirEntry' enum.
    DirEntryType is the type of entry i.e. file/folder.
    Specifies the type of directory entry.
    'kFile' indicates that current entry is of file type.
    'kDirectory' indicates that current entry is of directory type.
    'kSymlink' indicates that current entry is of symbolic link.
    Attributes:
        kfile: constant for the file entry type.
        kdirectory: constant for the directory entry type.
        ksymlink: constant for the symbolic-link entry type.
    """
    kfile = 'kFile'
    kdirectory = 'kDirectory'
    ksymlink = 'kSymlink'
|
# AutoKey phrase script: send Super+Z while the terminal is focused,
# Ctrl+Z everywhere else.
# NOTE(review): `window` and `keyboard` are presumably injected by the
# AutoKey runtime — confirm this file runs inside AutoKey.
if window.get_active_title() == "Terminal":
    keyboard.send_keys("<super>+z")
else:
    keyboard.send_keys("<ctrl>+z")
|
# AutoKey phrase script: Super+Z in the terminal, Ctrl+Z elsewhere.
# NOTE(review): `window` and `keyboard` come from the AutoKey runtime — confirm.
if window.get_active_title() == 'Terminal':
    keyboard.send_keys('<super>+z')
else:
    keyboard.send_keys('<ctrl>+z')
|
def naive_4():
    """Endless random-search baseline: perturb the weights (then the bias) of a
    784x10 linear model and keep any perturbation that improves test accuracy.

    NOTE(review): relies on globals `model`, `x_test`, `y_test` and `np`
    defined elsewhere — confirm before running.
    """
    it = 0
    def get_w():
        # Glorot-style uniform limit with fan_in = fan_out = 1 (as in original).
        fan_in = 1
        fan_out = 1
        limit = np.sqrt(6 / (fan_in + fan_out))
        return np.random.uniform(low=-limit, high=limit, size=(784, 10))
    w_init = get_w()
    # BUG FIX: was `np,zeros(shape(10,))` — a tuple of (np, NameError).
    b_init = np.zeros(shape=(10,))
    last_score = 0.0
    # BUG FIX: was misspelled `learnin_rate` while the loop read `learning_rate`.
    learning_rate = 0.1
    while True:
        w = w_init + learning_rate * get_w()
        model.set_weights(weights=[w, b_init])
        # BUG FIX: was `mode.evalute` (undefined name + typo for evaluate).
        score = model.evaluate(x_test, y_test, verbose=0)[1]
        if score > last_score:
            w_init = w
            last_score = score  # BUG FIX: was `last_sccore`
            print(it, "Best Acc", score)
        # BUG FIX: the second evaluation used an undefined `b`; perturb the
        # bias symmetrically to the weight step (presumed original intent —
        # TODO confirm).
        b = b_init + learning_rate * np.random.uniform(low=-1.0, high=1.0, size=(10,))
        model.set_weights(weights=[w_init, b])
        score = model.evaluate(x_test, y_test, verbose=0)[1]
        if score > last_score:
            b_init = b
            last_score = score
            print(it, "Best Acc", score)
        it += 1
|
def naive_4():
    """Endless random-search baseline over a 784x10 linear model's weights/bias.

    NOTE(review): relies on globals `model`, `x_test`, `y_test`, `np` — confirm.
    """
    it = 0
    def get_w():
        # Glorot-style uniform limit with fan_in = fan_out = 1.
        fan_in = 1
        fan_out = 1
        limit = np.sqrt(6 / (fan_in + fan_out))
        return np.random.uniform(low=-limit, high=limit, size=(784, 10))
    w_init = get_w()
    # BUG FIX: was `(np, zeros(shape(10)))` — a tuple containing a NameError.
    b_init = np.zeros(shape=(10,))
    last_score = 0.0
    # BUG FIX: was misspelled `learnin_rate` while the loop read `learning_rate`.
    learning_rate = 0.1
    while True:
        w = w_init + learning_rate * get_w()
        model.set_weights(weights=[w, b_init])
        # BUG FIX: was `mode.evalute(...)` — undefined name and typo.
        score = model.evaluate(x_test, y_test, verbose=0)[1]
        if score > last_score:
            w_init = w
            last_score = score  # BUG FIX: was `last_sccore`
            print(it, 'Best Acc', score)
        # BUG FIX: the second evaluation referenced an undefined `b`;
        # perturb the bias analogously (presumed intent — TODO confirm).
        b = b_init + learning_rate * np.random.uniform(low=-1.0, high=1.0, size=(10,))
        model.set_weights(weights=[w_init, b])
        score = model.evaluate(x_test, y_test, verbose=0)[1]
        if score > last_score:
            b_init = b
            last_score = score
            print(it, 'Best Acc', score)
        it += 1
|
# Kattis "What does the fox say?": for each test case read the recording, then
# "<animal> goes <sound>" lines until the sentinel question; every word of the
# recording that is not a known animal sound belongs to the fox.
n = int(input())
for _ in range(n):
    recording = input()
    sounds = []
    s = input()
    while s != "what does the fox say?":
        sounds.append(s.split(' ')[-1])  # last word is the animal's sound
        s = input()
    fox_says = ""
    for sound in recording.split(' '):
        if sound not in sounds:
            fox_says += " " + sound
    print(fox_says.strip())
|
# Kattis "What does the fox say?": words of the recording not matching any
# known animal sound are attributed to the fox.
n = int(input())
for _ in range(n):
    recording = input()
    sounds = []
    s = input()
    while s != 'what does the fox say?':
        sounds.append(s.split(' ')[-1])  # last word is the animal's sound
        s = input()
    fox_says = ''
    for sound in recording.split(' '):
        if sound not in sounds:
            fox_says += ' ' + sound
    print(fox_says.strip())
|
# Three while-loop counters: up by 1 to 5, up by 3 past 20, down by 3 below 0.
count = 0
while count != 5:
    count += 1
    print(count)
print('--------')
counter = 0
while counter <= 20:
    counter = counter + 3
    print(counter)
print('--------')
countersito = 20
while countersito >= 0:
    countersito -= 3
    print(countersito)
|
# Three while-loop counters: up by 1 to 5, up by 3 past 20, down by 3 below 0.
count = 0
while count != 5:
    count += 1
    print(count)
print('--------')
counter = 0
while counter <= 20:
    counter = counter + 3
    print(counter)
print('--------')
countersito = 20
while countersito >= 0:
    countersito -= 3
    print(countersito)
|
def netmiko_command(device, command=None, ckwargs=None, **kwargs):
    """Run *command* on device['nc'] (a Netmiko-style connection).

    ckwargs -- extra keyword arguments forwarded to send_command.
    Returns the command output, or a placeholder string when no command given.
    BUG FIX: ckwargs defaulted to a shared mutable dict (`ckwargs={}`).
    """
    if ckwargs is None:
        ckwargs = {}
    if command:
        output = device['nc'].send_command(command, **ckwargs)
    else:
        output = 'No command to run.'
    return output
|
def netmiko_command(device, command=None, ckwargs=None, **kwargs):
    """Run *command* on device['nc'] and return its output.

    BUG FIX: ckwargs defaulted to a shared mutable dict (`ckwargs={}`);
    it is now created per call.
    """
    if ckwargs is None:
        ckwargs = {}
    if command:
        output = device['nc'].send_command(command, **ckwargs)
    else:
        output = 'No command to run.'
    return output
|
# ----------------------------------------------------------------------
#
# Brad T. Aagaard, U.S. Geological Survey
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2017 University of California, Davis
#
# See COPYING for license information.
#
# ----------------------------------------------------------------------
#
# @file spatialdata/geocoords/__init__.py
#
# @brief Python spatialdata geocoords module initialization.
# Public names re-exported by the spatialdata.geocoords package.
__all__ = [
    'CoordSys',
    'CSCart',
    'CSGeo',
    'Converter',
]
# End of file
|
__all__ = ['CoordSys', 'CSCart', 'CSGeo', 'Converter']
|
########
# PART 1
def read(filename):
with open("event2021/day25/" + filename, "r") as file:
rows = [line.strip() for line in file.readlines()]
return [ch for row in rows for ch in row], len(rows[0]), len(rows)
def draw(region_data):
    """Print the grid row by row, followed by a blank separator line."""
    grid, width, height = region_data
    for row in range(height):
        print("".join(grid[row * width:row * width + width]))
    print()
def find_standstill(region_data, debug = False, should_draw = None, max_steps = -1):
    # Step the sea-cucumber simulation (AoC 2021 day 25) until no cucumber can
    # move; returns the 1-based number of the first motionless step, or None
    # if max_steps is reached first.
    region, width, height = region_data
    steps = 0
    while True:
        if debug:
            print("step", steps)
        if should_draw and should_draw(steps):
            draw((region, width, height))
        if steps == max_steps:
            return None
        stepped = False
        new_region = region[:]
        # The east-facing herd ('>') moves first, all simultaneously,
        # wrapping around the right edge.
        for y in range(height):
            for x in range(width):
                ch = region[(y * width) + x]
                if ch == '>':
                    if region[(y * width) + ((x + 1) % width)] == '.':
                        new_region[(y * width) + x] = '.'
                        new_region[(y * width) + ((x + 1) % width)] = '>'
                        stepped = True
        region = new_region
        new_region = region[:]
        # Then the south-facing herd ('v'), wrapping around the bottom edge.
        for y in range(height):
            for x in range(width):
                ch = region[(y * width) + x]
                if ch == 'v':
                    if region[((y + 1) % height) * width + x] == '.':
                        new_region[(y * width) + x] = '.'
                        new_region[((y + 1) % height) * width + x] = 'v'
                        stepped = True
        steps += 1
        if not stepped:
            break
        region = new_region
    return steps
# Regression checks against the worked example and the accepted puzzle answer.
#ex1 = read("example1.txt")
#find_standstill(ex1, True, lambda _: True, max_steps=4)
ex2 = read("example2.txt")
assert find_standstill(ex2, True, lambda x: x in [0, 1, 2, 3, 4, 5, 10, 20, 30, 40, 50, 55, 56, 57, 58, 59], 60) == 58
inp = read("input.txt")
answer = find_standstill(inp)
print("Part 1 =", answer)
assert answer == 456 # check with accepted answer
|
def read(filename):
    """Load a puzzle grid file; returns (flat cell list, width, height)."""
    with open('event2021/day25/' + filename, 'r') as handle:
        rows = [line.strip() for line in handle]
    flat = [ch for row in rows for ch in row]
    return (flat, len(rows[0]), len(rows))
def draw(region_data):
    """Print the grid row by row, then one blank separator line."""
    grid, width, height = region_data
    for row in range(height):
        print(''.join(grid[row * width:row * width + width]))
    print()
def find_standstill(region_data, debug=False, should_draw=None, max_steps=-1):
    # Step the sea-cucumber simulation until no cucumber can move; returns the
    # 1-based number of that first motionless step, or None once max_steps hits.
    (region, width, height) = region_data
    steps = 0
    while True:
        if debug:
            print('step', steps)
        if should_draw and should_draw(steps):
            draw((region, width, height))
        if steps == max_steps:
            return None
        stepped = False
        new_region = region[:]
        # East-facing herd ('>') moves first, wrapping around the right edge.
        for y in range(height):
            for x in range(width):
                ch = region[y * width + x]
                if ch == '>':
                    if region[y * width + (x + 1) % width] == '.':
                        new_region[y * width + x] = '.'
                        new_region[y * width + (x + 1) % width] = '>'
                        stepped = True
        region = new_region
        new_region = region[:]
        # Then the south-facing herd ('v'), wrapping around the bottom edge.
        for y in range(height):
            for x in range(width):
                ch = region[y * width + x]
                if ch == 'v':
                    if region[(y + 1) % height * width + x] == '.':
                        new_region[y * width + x] = '.'
                        new_region[(y + 1) % height * width + x] = 'v'
                        stepped = True
        steps += 1
        if not stepped:
            break
        region = new_region
    return steps
# Regression checks against the example grid and the accepted puzzle answer.
ex2 = read('example2.txt')
assert find_standstill(ex2, True, lambda x: x in [0, 1, 2, 3, 4, 5, 10, 20, 30, 40, 50, 55, 56, 57, 58, 59], 60) == 58
inp = read('input.txt')
answer = find_standstill(inp)
print('Part 1 =', answer)
assert answer == 456
|
# This sample tests the "reportPrivateUsage" feature.
class _TestClass(object):
    """Private (underscore-prefixed) class used to exercise reportPrivateUsage."""
    pass
class TestClass(object):
    """Public class whose instances carry a single private attribute."""
    def __init__(self):
        # _priv1 is non-public by convention (leading underscore).
        self._priv1 = 1
class _Testclass(object):
    """Private (underscore-prefixed) class used to exercise reportPrivateUsage."""
    pass
class Testclass(object):
    """Public class whose instances carry a single private attribute."""
    def __init__(self):
        # _priv1 is non-public by convention (leading underscore).
        self._priv1 = 1
|
# Copyright (c) 2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def ok():
    """Canned NETCONF reply: success with an empty <data/> element."""
    res = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
    xmlns:nxos="http://www.cisco.com/nxos:1.0"
    message-id="urn:uuid:e7ef8254-10a6-11e4-b86d-becafe000bed">
  <data/>
</rpc-reply>"""
    return res
def show_dhcp(port):
    """Canned `show running-config dhcp` reply for the given port-channel."""
    dhcp = ("ip source binding 10.0.0.1 FFFF.FFFF.FFFF.FFFF "
            "vlan 1 interface port-channel%s") % (port)
    res = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
    xmlns:nxos="http://www.cisco.com/nxos:1.0"
    message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">
  <data>
  !Command: show running-config dhcp | egrep port-channel%(port)s$
  !Time: Mon May 19 18:40:08 2014
  version 6.0(2)U2(4)
  interface port-channel%(port)s
  %(dhcp)s
  </data>
</rpc-reply>"""
    return res % ({'port': port,
                   'dhcp': dhcp})
def show_port_channel_config_trunked(port):
    """Canned running-config for a trunked port-channel interface."""
    res = """<?xml version="1.0" encoding="ISO-8859-1"?>
    <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
    xmlns:nxos="http://www.cisco.com/nxos:1.0"
    message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">
    <data>
    !Command: show running-config interface port-channel%(port)s
    !Time: Mon May 19 18:40:08 2014
    version 6.0(2)U2(4)
    interface port-channel%(port)s
    description CUST39a8365c-3b84-4169-bc1a-1efa3ab20e04-host
    switchport mode trunk
    switchport trunk allowed vlan 1,2
    ip verify source dhcp-snooping-vlan
    spanning-tree port type edge trunk
    no negotiate auto
    vpc %(port)s
    </data>
    </rpc-reply>"""
    return res % ({'port': port})
def show_ethernet_config_trunked(port):
    """Canned running-config for a trunked Ethernet1/<port> interface."""
    res = """<?xml version="1.0" encoding="ISO-8859-1"?>
    <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
    xmlns:nxos="http://www.cisco.com/nxos:1.0"
    message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">
    <data>
    !Command: show running-config interface Ethernet1/%(port)s
    !Time: Mon May 19 18:40:08 2014
    version 6.0(2)U2(4)
    interface Ethernet1/%(port)s
    description CUST39a8365c-3b84-4169-bc1a-1efa3ab20e04-host
    no lldp transmit
    switchport mode trunk
    switchport trunk allowed vlan 1,2
    spanning-tree port type edge trunk
    spanning-tree bpduguard enable
    channel-group %(port)s mode active
    </data>
    </rpc-reply>"""
    return res % ({'port': port})
def show_ethernet_config_access(port):
    """Canned running-config for an access-mode Ethernet1/<port> interface."""
    res = """<?xml version="1.0" encoding="ISO-8859-1"?>
    <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
    xmlns:nxos="http://www.cisco.com/nxos:1.0"
    message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">
    <data>
    !Command: show running-config interface Ethernet1/%(port)s
    !Time: Mon May 19 18:40:08 2014
    version 6.0(2)U2(4)
    interface Ethernet1/%(port)s
    description CUST32fdc565-7860-47b9-be57-f5d5ee1875a0-host
    switchport access vlan 3
    spanning-tree port type edge
    spanning-tree bpduguard enable
    </data>
    </rpc-reply>"""
    return res % ({'port': port})
def show_port_channel_status(port):
    """Canned `show interface port-channel` status reply (state up, vPC up)."""
    status = "vPC Status: Up, vPC number: %s" % (port)
    res = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
    xmlns:nxos="http://www.cisco.com/nxos:1.0"
    message-id="urn:uuid:c87305ee-0d19-11e4-ab20-becafe000bed">
  <data>
   <show>
    <interface>
     <__XML__INTF_ifeth>
      <__XML__PARAM_value>
       <__XML__INTF_output>port-channel%(port)s</__XML__INTF_output>
      </__XML__PARAM_value>
      <__XML__OPT_Cmd_show_interface_if_eth___readonly__>
       <__readonly__>
        <TABLE_interface>
         <ROW_interface>
          <interface>port-channel%(port)s</interface>
          <state>up</state>
          <vpc_status>%(status)s</vpc_status>
         </ROW_interface>
        </TABLE_interface>
       </__readonly__>
      </__XML__OPT_Cmd_show_interface_if_eth___readonly__>
     </__XML__INTF_ifeth>
    </interface>
   </show>
  </data>
</rpc-reply>"""
    return res % ({'port': port,
                   'status': status})
def show_ethernet_status(port):
    """Canned `show interface ethernet1/<port>` status reply (state up)."""
    res = """<?xml version="1.0" encoding="ISO-8859-1"?>
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"
    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"
    xmlns:nxos="http://www.cisco.com/nxos:1.0"
    message-id="urn:uuid:c87305ee-0d19-11e4-ab20-becafe000bed">
  <data>
   <show>
    <interface>
     <__XML__INTF_ifeth>
      <__XML__PARAM_value>
       <__XML__INTF_output>ethernet1/%(port)s</__XML__INTF_output>
      </__XML__PARAM_value>
      <__XML__OPT_Cmd_show_interface_if_eth___readonly__>
       <__readonly__>
        <TABLE_interface>
         <ROW_interface>
          <interface>ethernet1/%(port)s</interface>
          <state>up</state>
         </ROW_interface>
        </TABLE_interface>
       </__readonly__>
      </__XML__OPT_Cmd_show_interface_if_eth___readonly__>
     </__XML__INTF_ifeth>
    </interface>
   </show>
  </data>
</rpc-reply>"""
    return res % ({'port': port})
|
def ok():
    """Canned NETCONF success reply (empty <data/>), escaped single-line form."""
    res = '<?xml version="1.0" encoding="ISO-8859-1"?>\n<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"\n    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"\n    xmlns:nxos="http://www.cisco.com/nxos:1.0"\n    message-id="urn:uuid:e7ef8254-10a6-11e4-b86d-becafe000bed">\n  <data/>\n</rpc-reply>'
    return res
def show_dhcp(port):
    """Canned `show running-config dhcp` reply, escaped single-line form."""
    dhcp = 'ip source binding 10.0.0.1 FFFF.FFFF.FFFF.FFFF vlan 1 interface port-channel%s' % port
    res = '<?xml version="1.0" encoding="ISO-8859-1"?>\n<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"\n    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"\n    xmlns:nxos="http://www.cisco.com/nxos:1.0"\n    message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">\n  <data>\n  !Command: show running-config dhcp | egrep port-channel%(port)s$\n  !Time: Mon May 19 18:40:08 2014\n\n version 6.0(2)U2(4)\n\n interface port-channel%(port)s\n %(dhcp)s\n  </data>\n</rpc-reply>'
    return res % {'port': port, 'dhcp': dhcp}
def show_port_channel_config_trunked(port):
    """Canned trunked port-channel running-config, escaped single-line form."""
    res = '<?xml version="1.0" encoding="ISO-8859-1"?>\n    <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"\n    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"\n    xmlns:nxos="http://www.cisco.com/nxos:1.0"\n    message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">\n    <data>\n    !Command: show running-config interface port-channel%(port)s\n    !Time: Mon May 19 18:40:08 2014\n\n    version 6.0(2)U2(4)\n\n    interface port-channel%(port)s\n    description CUST39a8365c-3b84-4169-bc1a-1efa3ab20e04-host\n    switchport mode trunk\n    switchport trunk allowed vlan 1,2\n    ip verify source dhcp-snooping-vlan\n    spanning-tree port type edge trunk\n    no negotiate auto\n    vpc %(port)s\n\n    </data>\n    </rpc-reply>'
    return res % {'port': port}
def show_ethernet_config_trunked(port):
    """Canned trunked Ethernet1/<port> running-config, escaped single-line form."""
    res = '<?xml version="1.0" encoding="ISO-8859-1"?>\n    <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"\n    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"\n    xmlns:nxos="http://www.cisco.com/nxos:1.0"\n    message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">\n    <data>\n    !Command: show running-config interface Ethernet1/%(port)s\n    !Time: Mon May 19 18:40:08 2014\n\n    version 6.0(2)U2(4)\n\n    interface Ethernet1/%(port)s\n    description CUST39a8365c-3b84-4169-bc1a-1efa3ab20e04-host\n    no lldp transmit\n    switchport mode trunk\n    switchport trunk allowed vlan 1,2\n    spanning-tree port type edge trunk\n    spanning-tree bpduguard enable\n    channel-group %(port)s mode active\n\n    </data>\n    </rpc-reply>'
    return res % {'port': port}
def show_ethernet_config_access(port):
    """Canned access-mode Ethernet1/<port> running-config, escaped single-line form."""
    res = '<?xml version="1.0" encoding="ISO-8859-1"?>\n    <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"\n    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"\n    xmlns:nxos="http://www.cisco.com/nxos:1.0"\n    message-id="urn:uuid:4a9be8b4-df85-11e3-ab20-becafe000bed">\n    <data>\n    !Command: show running-config interface Ethernet1/%(port)s\n    !Time: Mon May 19 18:40:08 2014\n\n    version 6.0(2)U2(4)\n\n    interface Ethernet1/%(port)s\n    description CUST32fdc565-7860-47b9-be57-f5d5ee1875a0-host\n    switchport access vlan 3\n    spanning-tree port type edge\n    spanning-tree bpduguard enable\n\n    </data>\n    </rpc-reply>'
    return res % {'port': port}
def show_port_channel_status(port):
    """Canned port-channel status reply (vPC up), escaped single-line form."""
    status = 'vPC Status: Up, vPC number: %s' % port
    res = '<?xml version="1.0" encoding="ISO-8859-1"?>\n<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"\n    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"\n    xmlns:nxos="http://www.cisco.com/nxos:1.0"\n    message-id="urn:uuid:c87305ee-0d19-11e4-ab20-becafe000bed">\n  <data>\n   <show>\n    <interface>\n     <__XML__INTF_ifeth>\n      <__XML__PARAM_value>\n       <__XML__INTF_output>port-channel%(port)s</__XML__INTF_output>\n      </__XML__PARAM_value>\n      <__XML__OPT_Cmd_show_interface_if_eth___readonly__>\n       <__readonly__>\n        <TABLE_interface>\n         <ROW_interface>\n          <interface>port-channel%(port)s</interface>\n          <state>up</state>\n          <vpc_status>%(status)s</vpc_status>\n         </ROW_interface>\n        </TABLE_interface>\n       </__readonly__>\n      </__XML__OPT_Cmd_show_interface_if_eth___readonly__>\n     </__XML__INTF_ifeth>\n    </interface>\n   </show>\n  </data>\n</rpc-reply>'
    return res % {'port': port, 'status': status}
def show_ethernet_status(port):
    """Canned ethernet1/<port> status reply (state up), escaped single-line form."""
    res = '<?xml version="1.0" encoding="ISO-8859-1"?>\n<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"\n    xmlns:if="http://www.cisco.com/nxos:1.0:if_manager"\n    xmlns:nxos="http://www.cisco.com/nxos:1.0"\n    message-id="urn:uuid:c87305ee-0d19-11e4-ab20-becafe000bed">\n  <data>\n   <show>\n    <interface>\n     <__XML__INTF_ifeth>\n      <__XML__PARAM_value>\n       <__XML__INTF_output>ethernet1/%(port)s</__XML__INTF_output>\n      </__XML__PARAM_value>\n      <__XML__OPT_Cmd_show_interface_if_eth___readonly__>\n       <__readonly__>\n        <TABLE_interface>\n         <ROW_interface>\n          <interface>ethernet1/%(port)s</interface>\n          <state>up</state>\n         </ROW_interface>\n        </TABLE_interface>\n       </__readonly__>\n      </__XML__OPT_Cmd_show_interface_if_eth___readonly__>\n     </__XML__INTF_ifeth>\n    </interface>\n   </show>\n  </data>\n</rpc-reply>'
    return res % {'port': port}
|
class Solution:
    def smallerNumbersThanCurrent(self, nums: List[int]) -> List[int]:
        """For each value in nums, count how many values are strictly smaller.

        Sorting once means a value's first sorted position *is* that count
        (list.index returns the first match).
        """
        ranked = sorted(nums)
        return [ranked.index(value) for value in nums]
|
class Solution:
    def smaller_numbers_than_current(self, nums: List[int]) -> List[int]:
        """Count, per element, how many elements of nums are strictly smaller.

        A single sort suffices: each value's first sorted index equals the
        number of smaller values.
        """
        ordering = sorted(nums)
        return [ordering.index(v) for v in nums]
|
# def parse_ranges(input_string):
#
# output = []
# ranges = [item.strip() for item in input_string.split(',')]
#
# for item in ranges:
# start, end = [int(i) for i in item.split('-')]
# output.extend(range(start, end + 1))
#
# return output
# def parse_ranges(input_string):
#
# ranges = [item.strip() for item in input_string.split(',')]
#
# for item in ranges:
# start, end = [int(i) for i in item.split('-')]
# yield from range(start, end + 1)
# def parse_ranges(input_string):
#
# ranges = [range_.strip() for range_ in input_string.split(',')]
#
# for item in ranges:
# if '-' in item:
# start, end = [int(i) for i in item.split('-')]
# yield from range(start, end + 1)
# else:
# yield int(item)
def parse_ranges(input_string):
ranges = [range_.strip() for range_ in input_string.split(',')]
for item in ranges:
if '->' in item:
yield int(item.split('-')[0])
elif '-' in item:
start, end = [int(i) for i in item.split('-')]
yield from range(start, end + 1)
else:
yield int(item)
|
def parse_ranges(input_string):
ranges = [range_.strip() for range_ in input_string.split(',')]
for item in ranges:
if '->' in item:
yield int(item.split('-')[0])
elif '-' in item:
(start, end) = [int(i) for i in item.split('-')]
yield from range(start, end + 1)
else:
yield int(item)
|
"""
The question is incorrect as it says you can only traverse an open path once but
uses the same path repeatedly, violating its own rules
"""
def uniquePathsIII(grid: list[list[int]]) -> int:
    """Return the minimum number of open (originally 0) neighbour cells
    adjacent to the start (1) or end (2) cell of the grid.

    Side effect: every 0 cell in `grid` is overwritten with 3 (marker for
    "open") in place.

    BUG FIX: the original computed ``min(paths, *sum(generator))`` -- ``sum``
    yields an int, and unpacking an int with ``*`` raises TypeError on every
    non-empty grid.  The ``*`` is removed so the neighbour count is compared
    directly.
    """
    if not grid or not grid[0]:
        return 0
    index = dict()
    for r in range(rows := len(grid)):
        for c in range(cols := len(grid[0])):
            if not (v := grid[r][c]):
                grid[r][c] = 3
            elif v == 2 or v == 1:
                index[v] = (r, c)

    def coords(r, c):
        # The four orthogonal neighbours of (r, c); bounds checked by caller.
        yield r + 1, c
        yield r - 1, c
        yield r, c + 1
        yield r, c - 1

    paths = float('inf')
    for row, col in index.values():
        open_neighbours = sum(1 for r, c in coords(row, col)
                              if 0 <= r < rows and 0 <= c < cols
                              and grid[r][c] == 3)
        paths = min(paths, open_neighbours)
    return paths
|
"""
The question is incorrect as it says you can only traverse an open path once but
uses the same path repeatedly, violating its own rules
"""
def unique_paths_iii(grid: list[list[int]]) -> int:
    """Return the minimum number of open (originally 0) neighbour cells
    adjacent to the start (1) or end (2) cell; mutates `grid` (0 -> 3).

    BUG FIX: ``min(paths, *sum(generator))`` unpacked an int with ``*``,
    raising TypeError on every non-empty grid; the ``*`` is removed.
    """
    if not grid or not grid[0]:
        return 0
    index = dict()
    for r in range((rows := len(grid))):
        for c in range((cols := len(grid[0]))):
            if not (v := grid[r][c]):
                grid[r][c] = 3
            elif v == 2 or v == 1:
                index[v] = (r, c)

    def coords(r, c):
        # Four orthogonal neighbours; bounds checked by the caller.
        yield (r + 1, c)
        yield (r - 1, c)
        yield (r, c + 1)
        yield (r, c - 1)

    paths = float('inf')
    for (row, col) in index.values():
        open_neighbours = sum((1 for (r, c) in coords(row, col)
                               if (0 <= r < rows and 0 <= c < cols) and grid[r][c] == 3))
        paths = min(paths, open_neighbours)
    return paths
|
# Print a 4x4 pattern: columns 0 and 3 carry stars on rows 0-2, and the
# bottom row carries stars in columns 1-3 (identical output to the original).
for row in range(4):
    for col in range(4):
        is_star = (col % 3 == 0 and row < 3) or (row == 3 and col > 0)
        print('*' if is_star else ' ', end=' ')
    print()
|
# Print a 4x4 pattern: stars in columns 0 and 3 on rows 0-2, and in
# columns 1-3 on the bottom row (same output as the original if/else form).
for row in range(4):
    for col in range(4):
        is_star = (col % 3 == 0 and row < 3) or (row == 3 and col > 0)
        print('*' if is_star else ' ', end=' ')
    print()
|
def power(integer1, integer2=3):
    """Return integer1 raised to the power integer2 by repeated multiplication.

    A zero or negative exponent yields 1 (the loop body never runs).
    """
    result = 1
    for _ in range(integer2):
        result *= integer1
    return result


print(power(3))
print(power(3, 2))
|
def power(integer1, integer2=3):
    """Return integer1 ** integer2 computed by repeated multiplication.

    Zero or negative exponents yield 1 (empty range).
    """
    result = 1
    for _ in range(integer2):
        result *= integer1
    return result


print(power(3))
print(power(3, 2))
|
# Interactive script: compute the Newtonian gravitational attraction between
# two point masses.  G, m1, m2, F and d stand for the gravitational constant,
# first mass, second mass, force and distance respectively.
G = 6.67 * 10 ** -11  # gravitational constant (approximate value)
m1 = float(input("The value of the initial mass: "))
m2 = float(input("The value of the final mass: "))
d = float(input("The distance between the bodies: "))
# Newton's law of universal gravitation: F = G * m1 * m2 / d^2.
F = (G * m1 * m2)/(d ** 2)
print("The magnitude of the attractive force: " ,F)
|
# Interactive script: compute the Newtonian gravitational attraction between
# two point masses (F = g * m1 * m2 / d^2).
g = 6.67 * 10 ** (-11)  # gravitational constant (approximate value)
m1 = float(input('The value of the initial mass: '))
m2 = float(input('The value of the final mass: '))
d = float(input('The distance between the bodies: '))
# BUG FIX: the force and print lines referenced undefined upper-case names
# G and F (NameError at runtime); use the lower-case g and f defined here.
f = g * m1 * m2 / d ** 2
print('The magnitude of the attractive force: ', f)
|
# Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
def f_gold(arr, n):
    """For each start index i in arr[:n], let w be the length of the longest
    window starting at i whose elements are all distinct; return the sum of
    w*(w+1)//2 over all i.

    Sliding-window implementation.  A set replaces the original list for the
    membership test, turning each ``in`` check from O(window) into O(1);
    within a window all elements are distinct, so set removal matches the
    original ``list.remove`` exactly.  Elements must be hashable.
    """
    window = set()
    j = 0
    ans = 0
    for i in range(n):
        # Grow the window while the next element is not already in it.
        while j < n and arr[j] not in window:
            window.add(arr[j])
            j += 1
        ans += (j - i) * (j - i + 1) // 2
        window.remove(arr[i])
    return ans
#TOFILL -- f_filled (the candidate implementation under test) is inserted elsewhere.
if __name__ == '__main__':
    # Test harness: run the candidate f_filled against the reference f_gold
    # on each (array, n) parameter set and report how many outputs match.
    param = [
    ([3, 4, 5, 6, 12, 15, 16, 17, 20, 20, 22, 24, 24, 27, 28, 34, 37, 39, 39, 41, 43, 49, 49, 51, 55, 62, 63, 67, 71, 74, 74, 74, 77, 84, 84, 89, 89, 97, 99],24,),
    ([-8, 54, -22, 18, 20, 44, 0, 54, 90, -4, 4, 40, -74, -16],13,),
    ([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],28,),
    ([36, 71, 36, 58, 38, 90, 17],4,),
    ([-90, -32, -16, 18, 38, 82],5,),
    ([1, 0, 1],2,),
    ([3, 11, 21, 25, 28, 28, 38, 42, 48, 53, 55, 55, 55, 58, 71, 75, 79, 80, 80, 94, 96, 99],20,),
    ([-16, -52, -4, -46, 54, 0, 8, -64, -82, -10, -62, -10, 58, 44, -28, 86, -24, 16, 44, 22, -28, -42, -52, 8, 76, -44, -34, 2, 88, -88, -14, -84, -36, -68, 76, 20, 20, -50],35,),
    ([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],27,),
    ([19, 13, 61, 32, 92, 90, 12, 81, 52],5,)
    ]
    n_success = 0
    for i, parameters_set in enumerate(param):
        if f_filled(*parameters_set) == f_gold(*parameters_set):
            n_success+=1
    # Report: "#Results: <matches>, <total parameter sets>".
    print("#Results: %i, %i" % (n_success, len(param)))
|
def f_gold(arr, n):
    """For each start index i in arr[:n], let w be the length of the longest
    all-distinct window starting at i; return the sum of w*(w+1)//2 over i.

    A set replaces the original list for window membership, making each
    ``in`` test O(1) instead of O(window); behaviour is otherwise identical
    (window elements are distinct, so set removal matches list removal).
    """
    window = set()
    j = 0
    ans = 0
    for i in range(n):
        while j < n and arr[j] not in window:
            window.add(arr[j])
            j += 1
        ans += (j - i) * (j - i + 1) // 2
        window.remove(arr[i])
    return ans
if __name__ == '__main__':
    # Test harness: run the externally supplied f_filled against the
    # reference f_gold on each (array, n) pair and count matching outputs.
    param = [([3, 4, 5, 6, 12, 15, 16, 17, 20, 20, 22, 24, 24, 27, 28, 34, 37, 39, 39, 41, 43, 49, 49, 51, 55, 62, 63, 67, 71, 74, 74, 74, 77, 84, 84, 89, 89, 97, 99], 24), ([-8, 54, -22, 18, 20, 44, 0, 54, 90, -4, 4, 40, -74, -16], 13), ([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 28), ([36, 71, 36, 58, 38, 90, 17], 4), ([-90, -32, -16, 18, 38, 82], 5), ([1, 0, 1], 2), ([3, 11, 21, 25, 28, 28, 38, 42, 48, 53, 55, 55, 55, 58, 71, 75, 79, 80, 80, 94, 96, 99], 20), ([-16, -52, -4, -46, 54, 0, 8, -64, -82, -10, -62, -10, 58, 44, -28, 86, -24, 16, 44, 22, -28, -42, -52, 8, 76, -44, -34, 2, 88, -88, -14, -84, -36, -68, 76, 20, 20, -50], 35), ([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 27), ([19, 13, 61, 32, 92, 90, 12, 81, 52], 5)]
    n_success = 0
    for (i, parameters_set) in enumerate(param):
        if f_filled(*parameters_set) == f_gold(*parameters_set):
            n_success += 1
    # Report "#Results: <matches>, <total>".
    print('#Results: %i, %i' % (n_success, len(param)))
|
var1 = 6
var2 = 3

# Compare var1 against 10 (output text matches the original script exactly).
print("Bigger than 10" if var1 >= 10 else "Smaller or equal to 10")

# Equality check between the two variables.
print("var1 and 2 are equal" if var1 == var2 else "not equal")

# Classify var1 relative to the open interval (5, 7).
if 5 < var1 < 7:
    print("Between")
elif var1 <= 5:
    print("Smaller than 5")
else:
    print("Bigger than or equal to 7")

# Combined condition: var1 must exceed 5 and var2 must be below 5.
if var1 <= 5:
    print("var1 breaks the condition")
elif var2 < 5:
    print("in good condition")
else:
    print("var2 breaks the condition")
|
var1 = 6
var2 = 3

# Compare var1 against 10 (identical output to the original if/else chain).
print('Bigger than 10' if var1 >= 10 else 'Smaller or equal to 10')

# Equality check between the two variables.
print('var1 and 2 are equal' if var1 == var2 else 'not equal')

# Classify var1 relative to the open interval (5, 7).
if 5 < var1 < 7:
    print('Between')
elif var1 <= 5:
    print('Smaller than 5')
else:
    print('Bigger than or equal to 7')

# Combined condition: var1 must exceed 5 and var2 must be below 5.
if var1 <= 5:
    print('var1 breaks the condition')
elif var2 < 5:
    print('in good condition')
else:
    print('var2 breaks the condition')
|
class KNN():
    """k-nearest-neighbours regressor: predicts the mean target of the k
    training points closest to each query point (distance via external euc())."""

    def fit(self, X_train, y_train):
        """Memorise the training inputs and targets."""
        self.X_train = X_train
        self.y_train = y_train

    def predict(self, X_test, k=3):
        """Return an array of predictions, one per row of X_test."""
        predictions = np.zeros(X_test.shape[0])
        for idx, sample in enumerate(X_test):
            dists = self._get_distances(sample)
            neighbours = self._get_k_nearest(dists, k)
            predictions[idx] = self._get_predicted_value(neighbours)
        return predictions

    # helper functions
    def _get_distances(self, x):
        """Array of distances from x to every training point (uses euc())."""
        dists = np.zeros(self.X_train.shape[0])
        for idx, sample in enumerate(self.X_train):
            dists[idx] = euc(x, sample)
        return dists

    def _get_k_nearest(self, distances, k):
        """Indices of the k smallest distances."""
        return np.argsort(distances)[:k]

    def _get_predicted_value(self, k_nearest):
        """Mean of the target values at the given neighbour indices."""
        return np.mean(self.y_train[k_nearest])
|
class Knn:
    """k-nearest-neighbours regressor: predicts the mean target of the k
    closest training points (distance computed by the external euc())."""

    def fit(self, X_train, y_train):
        """Memorise the training inputs and targets."""
        self.X_train = X_train
        self.y_train = y_train

    def predict(self, X_test, k=3):
        """Return an array of predictions, one per row of X_test."""
        predictions = np.zeros(X_test.shape[0])
        for idx, sample in enumerate(X_test):
            dists = self._get_distances(sample)
            neighbours = self._get_k_nearest(dists, k)
            predictions[idx] = self._get_predicted_value(neighbours)
        return predictions

    def _get_distances(self, x):
        """Array of distances from x to every training point (uses euc())."""
        dists = np.zeros(self.X_train.shape[0])
        for idx, sample in enumerate(self.X_train):
            dists[idx] = euc(x, sample)
        return dists

    def _get_k_nearest(self, distances, k):
        """Indices of the k smallest distances."""
        return np.argsort(distances)[:k]

    def _get_predicted_value(self, k_nearest):
        """Mean of the target values at the given neighbour indices."""
        return np.mean(self.y_train[k_nearest])
|
# Module authors, as "Name <email>" strings.
__author__ = [
    "Alexander Dunn <[email protected]>",
    "Alireza Faghaninia <[email protected]>",
]
class BaseDataRetrieval:
    """Abstract base for classes that retrieve data from material APIs.

    Subclasses must implement two methods:

    * ``get_dataframe(criteria, properties, **kwargs)`` -- run a query and
      return a pandas DataFrame (properties as columns, samples as rows).
    * ``api_link()`` -- return a link to the underlying API documentation.

    ``criteria`` should be a plain dict mapping criterion names to values or
    ranges (e.g. ``{'band_gap': [0.0, 0.15]}``) and ``properties`` a list of
    columns to return (e.g. ``['structure', 'formula']``); each subclass
    translates these "sensible" arguments into its database's native query
    format rather than exposing raw query syntax to the caller.  Use the
    ``get_dataframe`` kwargs sparingly for API-specific options that cannot
    be expressed through criteria/properties.  A raw, API-native query method
    *may* additionally be provided by overriding ``get_data``.

    Documentation requirements for subclasses: the class docstring must
    briefly describe the data available from the source and the accepted
    forms of ``criteria``/``properties`` (or link to a page that does); all
    options must be ``__init__`` parameters, documented in Google style
    (https://google.github.io/styleguide/pyguide.html).
    """

    def api_link(self):
        """Return a link to comprehensive API documentation or the data source.

        Returns:
            (str): A link to the API documentation for this DataRetrieval class.
        """
        raise NotImplementedError("api_link() is not defined!")

    def get_dataframe(self, criteria, properties, **kwargs):
        """Retrieve a dataframe of properties satisfying the given criteria.

        Args:
            criteria (dict): Criterion name -> value or range of the criterion.
            properties (list): Properties to return for matching entries,
                e.g. ``['structure', 'formula']``.

        Returns:
            (pandas DataFrame): Properties as columns, samples as rows.
        """
        raise NotImplementedError("get_dataframe() is not defined!")
|
# Module authors, as "Name <email>" strings.
__author__ = ['Alexander Dunn <[email protected]>', 'Alireza Faghaninia <[email protected]>']
class Basedataretrieval:
    """Abstract base for classes that retrieve data from material APIs.

    Subclasses must implement ``get_dataframe`` (translate a "sensible"
    criteria dict plus a properties list into the database's native query and
    return a pandas DataFrame with properties as columns and samples as rows)
    and ``api_link`` (return a link to the source's API documentation).
    A raw, API-native query method may optionally be exposed as ``get_data``.
    """

    def api_link(self):
        """
        The link to comprehensive API documentation or data source.
        Returns:
            (str): A link to the API documentation for this DataRetrieval class.
        """
        # BUG FIX: `not_implemented_error` is an undefined name (NameError at
        # call time); raise the builtin NotImplementedError instead.
        raise NotImplementedError('api_link() is not defined!')

    def get_dataframe(self, criteria, properties, **kwargs):
        """
        Retrieve a dataframe of properties from the database which satisfy
        criteria.
        Args:
            criteria (dict): The name of each criterion is the key; the value
                or range of the criterion is the value.
            properties (list): Properties to return from the query matching
                the criteria. For example, ['structure', 'formula']
        Returns:
            (pandas DataFrame) The dataframe containing properties as columns
            and samples as rows.
        """
        # BUG FIX: same undefined-name fix as in api_link above.
        raise NotImplementedError('get_dataframe() is not defined!')
|
# -*- coding: utf-8 -*-
class Empty(object):
    """Sentinel type representing an emptiness state in `grappa`."""

    def __repr__(self):
        # Fixed textual representation for the sentinel.
        return 'Empty'

    def __len__(self):
        # Zero length makes every Empty instance falsy.
        return 0


# Shared object reference used to signal emptiness.
empty = Empty()
|
class Empty(object):
    """
    Empty object represents emptiness state in `grappa`.
    """

    def __repr__(self):
        return 'Empty'

    def __len__(self):
        # Zero length makes every Empty instance falsy.
        return 0


# BUG FIX: this previously read ``empty = empty()``, a NameError because
# `empty` is not yet bound; instantiate the Empty class instead.
empty = Empty()
|
# Package metadata for the alpha-vantage-py distribution.
__title__ = 'alpha-vantage-py'
__description__ = 'Alpha Vantage Python package.'
__url__ = 'https://github.com/wstolk/alpha-vantage'
__version__ = '0.0.4'
# Build number encoded in hex (0x022501 == 140545 decimal).
__build__ = 0x022501
__author__ = 'Wouter Stolk'
__author_email__ = '[email protected]'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2021 Wouter Stolk'
# Decorative easter-egg string (sparkles + cake + sparkles).
__cake__ = u'\u2728 \U0001f370 \u2728'
|
# Package metadata for the alpha-vantage-py distribution.
__title__ = 'alpha-vantage-py'
__description__ = 'Alpha Vantage Python package.'
__url__ = 'https://github.com/wstolk/alpha-vantage'
__version__ = '0.0.4'
# Build number (decimal form of hex 0x022501).
__build__ = 140545
__author__ = 'Wouter Stolk'
__author_email__ = '[email protected]'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2021 Wouter Stolk'
# Decorative easter-egg string (sparkles + cake + sparkles).
__cake__ = u'✨ 🍰 ✨'
|
# Supported languages: display name ("English") and ISO 639-1 code ("alpha2").
LANGUAGES = [
    dict(English="English", alpha2="en"),
    dict(English="Italian", alpha2="it"),
]
|
# Supported languages: display name ('English') and ISO 639-1 code ('alpha2').
languages = [
    {'English': 'English', 'alpha2': 'en'},
    {'English': 'Italian', 'alpha2': 'it'},
]
|
def max_profit(prices) -> int:
    """Return the best profit from buying at one price and selling at a later
    one; 0 if no profitable trade exists or prices is empty.

    BUG FIX: the original tracked the running minimum price but never used
    it, returning max(prices[1:]) instead of a profit.  This is the standard
    single-pass algorithm: track the cheapest price seen so far and the best
    (price - cheapest) spread.
    """
    if not prices:
        return 0
    min_price = prices[0]
    best = 0
    for price in prices[1:]:
        best = max(best, price - min_price)
        min_price = min(min_price, price)
    return best


arr = [100, 180, 260, 310, 40, 535, 695]
print(max_profit(arr))  # 655: buy at 40, sell at 695
|
def max_profit(prices) -> int:
    """Return the best profit from one buy followed by one later sell; 0 if
    no profitable trade exists or prices is empty.

    BUG FIX: the original referenced undefined camelCase names (minPrice,
    maxPrice) -- a NameError on any non-empty input -- and, like its twin
    above, never used the minimum price.  Replaced with the standard
    single-pass best-spread algorithm.
    """
    if not prices:
        return 0
    min_price = prices[0]
    best = 0
    for price in prices[1:]:
        best = max(best, price - min_price)
        min_price = min(min_price, price)
    return best


arr = [100, 180, 260, 310, 40, 535, 695]
print(max_profit(arr))  # 655: buy at 40, sell at 695
|
# Paula Daly Solution to Problem 4 March 2019
# Collatz - particular sequence always reaches 1
# Defining the function
def collatz(number):
    """Print and return the Collatz successor of number
    (n // 2 if even, 3n + 1 if odd)."""
    if number % 2 == 0:
        successor = number // 2
    else:
        successor = 3 * number + 1
    print(successor)
    return successor
try:
    # Read the starting value.  NOTE(review): n begins life as a *string*,
    # so the first `n != 1` test is always True; the loop only terminates
    # once collatz() has returned the integer 1.
    n = input("Enter number: ")
    # Run the sequence until it reaches 1, printing each term.
    while n != 1:
        n = collatz(int(n))
# ValueError fires for non-numeric input only; a negative integer is NOT
# caught here and would loop indefinitely -- TODO confirm intended handling.
except ValueError:
    print('Please enter a positive integer')
|
def collatz(number):
    """Print and return the Collatz successor of number
    (n // 2 if even, 3n + 1 if odd)."""
    if number % 2 == 0:
        successor = number // 2
    else:
        successor = 3 * number + 1
    print(successor)
    return successor
try:
    # Read the starting value as a string; collatz(int(n)) converts it.
    n = input('Enter number: ')
    # Iterate until collatz() returns the integer 1 (the initial string
    # value of n never equals the int 1, so the loop always starts).
    while n != 1:
        n = collatz(int(n))
# Catches non-numeric input only; negative integers are not handled here.
except ValueError:
    print('Please enter a positive integer')
|
"""
Reports: module definition
"""
# Module definition metadata: title, description, mount URL and module type.
PROPERTIES = {
    'title': 'Reports',
    'details': 'Create Reports',
    'url': '/reports/',
    'system': False,
    'type': 'minor',
}
# Regex patterns for URLs served by this module.
URL_PATTERNS = [
    '^/reports/',
]
|
"""
Reports: module definition
"""
# Module definition metadata: title, description, mount URL and module type.
properties = {'title': 'Reports', 'details': 'Create Reports', 'url': '/reports/', 'system': False, 'type': 'minor'}
# Regex patterns for URLs served by this module.
url_patterns = ['^/reports/']
|
"""
Datos de entrada
Presupuesto-->p-->float
Datos de Salida
Presupuesto ginecologia-->g-->float
Presupuesto traumatologiaa-->t-->float
Presupuesto pediatria-->e-->float
"""
#entrada
p=float(input("Digite el presupuesto Total: "))
#caja negra
g=p*0.4
t=p*0.3
e=p*0.3
#salida
print("El presupuesto dedicado a ginecologia: ",g)
print("El presupuesto dedicado a traumatologia: ",t)
print("El presupuesto dedicado a pediatria: ",e)
|
"""
Datos de entrada
Presupuesto-->p-->float
Datos de Salida
Presupuesto ginecologia-->g-->float
Presupuesto traumatologiaa-->t-->float
Presupuesto pediatria-->e-->float
"""
p = float(input('Digite el presupuesto Total: '))
g = p * 0.4
t = p * 0.3
e = p * 0.3
print('El presupuesto dedicado a ginecologia: ', g)
print('El presupuesto dedicado a traumatologia: ', t)
print('El presupuesto dedicado a pediatria: ', e)
|
class ConfigBase:
    """Mutable holder for zookeeper / broker / topic connection settings."""

    def __init__(self):
        # All settings start out as empty strings.
        self.zookeeper = ""
        self.brokers = ""
        self.topic = ""

    def __str__(self):
        # Same text as the original 'a + ";" + b + ";" + c' concatenation.
        return ";".join((self.zookeeper, self.brokers, self.topic))

    def set_zookeeper(self, conf):
        self.zookeeper = conf

    def set_brokers(self, conf):
        self.brokers = conf

    def set_topic(self, conf):
        self.topic = conf
|
class Configbase:
    """Mutable holder for zookeeper / broker / topic connection settings."""

    def __init__(self):
        # All settings start out as empty strings.
        self.zookeeper = ''
        self.brokers = ''
        self.topic = ''

    def __str__(self):
        # Same text as the original "a + ';' + b + ';' + c" concatenation.
        return ';'.join((self.zookeeper, self.brokers, self.topic))

    def set_zookeeper(self, conf):
        self.zookeeper = conf

    def set_brokers(self, conf):
        self.brokers = conf

    def set_topic(self, conf):
        self.topic = conf
|
'''
Created on May 13, 2016
@author: david
'''
# Script entry point -- currently an intentional no-op placeholder.
if __name__ == '__main__':
    pass
|
"""
Created on May 13, 2016
@author: david
"""
if __name__ == '__main__':
pass
|
# numbers_list = [int(x) for x in input().split(", ")]
def find_sum(numbers_list):
    """Fold numbers_list into a running product: multiply by values <= 5,
    divide by values in (5, 10], ignore anything above 10.  Starts at 1.

    NOTE: despite the name, this computes a product/quotient, not a sum.
    """
    result = 1
    for number in numbers_list:
        if number <= 5:
            result *= number
        elif number <= 10:
            result /= number
    return result


print(find_sum([1, 4, 5]), 20)
print(find_sum([4, 5, 6, 1, 3]), 20)
print(find_sum([2, 5, 10]), 20)
|
def find_sum(numbers_list):
    """Fold numbers_list into a running product: multiply by values <= 5,
    divide by values in (5, 10], ignore anything above 10.  Starts at 1.
    (Despite the name, this is a product/quotient, not a sum.)
    """
    result = 1
    for number in numbers_list:
        if number <= 5:
            result *= number
        elif number <= 10:
            result /= number
    return result


print(find_sum([1, 4, 5]), 20)
print(find_sum([4, 5, 6, 1, 3]), 20)
print(find_sum([2, 5, 10]), 20)
|
# post to the array-connections/connection-key endpoint to get a connection key
res = client.post_array_connections_connection_key()
print(res)
# Use isinstance instead of an exact type() comparison so subclasses of
# ValidResponse are also handled.
if isinstance(res, pypureclient.responses.ValidResponse):
    print(list(res.items))
|
# Post to the array-connections/connection-key endpoint to get a connection key.
res = client.post_array_connections_connection_key()
print(res)
# Use isinstance instead of an exact type() comparison so subclasses of
# ValidResponse are also handled.
if isinstance(res, pypureclient.responses.ValidResponse):
    print(list(res.items))
|
# Name of the default bot configuration.
BOT_CONFIG = 'default'
# Name of the directory holding configuration files.
CONFIGS_DIR_NAME = 'configs'
# Root name for bot log output.
BOTS_LOG_ROOT = 'bots'
# Log file name.
LOG_FILE = 'bots.log'
# Main-loop interval -- presumably seconds; unit not stated here, TODO confirm.
LOOP_INTERVAL = 60
# Comma-separated list of settings files to load.
SETTINGS = 'settings.yml,secrets.yml'
# Default verbosity level.
VERBOSITY = 1
|
# Name of the default bot configuration.
bot_config = 'default'
# Name of the directory holding configuration files.
configs_dir_name = 'configs'
# Root name for bot log output.
bots_log_root = 'bots'
# Log file name.
log_file = 'bots.log'
# Main-loop interval -- presumably seconds; unit not stated here, TODO confirm.
loop_interval = 60
# Comma-separated list of settings files to load.
settings = 'settings.yml,secrets.yml'
# Default verbosity level.
verbosity = 1
|
class BotLogic:
    # NOTE(review): these methods take no `self` parameter -- they only work
    # when called with exactly one positional argument (e.g.
    # BotLogic.BotExit(msg), or instance.BotExit() where the instance binds
    # to Nachricht).  Confirm whether instance-method semantics were intended.
    # `Nachricht` is German for "message".
    def BotExit(Nachricht):
        print("Say Goodbye to the Bot")
        # Log off from the bot (translated from German: "Beim Bot abmelden")
    def BotStart(Nachricht):
        print("Say Hello to the Bot")
        # Log on to the bot (translated from German: "Beim Bot anmelden")
|
class Botlogic:
    # NOTE(review): these methods take no `self`; they expect a single
    # positional argument (Nachricht, German for "message") -- confirm
    # whether instance-method semantics were intended.
    def bot_exit(Nachricht):
        print('Say Goodbye to the Bot')
    def bot_start(Nachricht):
        print('Say Hello to the Bot')
|
"""
1. Clarification
2. Possible solutions
- Python built-in
- Hand-crafted
- Deque
3. Coding
4. Tests
"""
# # T=O(n), S=O(n)
# class Solution:
# def reverseWords(self, s: str) -> str:
# if not s: return ''
# return ' '.join(reversed(s.split()))
# T=O(n), S=O(n) in python or O(1) in c++
class Solution:
    # T=O(n); S=O(n) in Python (would be O(1) with an in-place char buffer)
    def reverseWords(self, s: str) -> str:
        """Return s with its words in reverse order, single-space separated,
        outer spaces stripped."""
        if not s: return ''
        chars = self.trim_spaces(s)
        self.reverse(chars, 0, len(chars) - 1)
        self.reverse_each_word(chars)
        return ''.join(chars)

    def trim_spaces(self, s: str) -> list:
        """Return s as a char list with outer spaces stripped and inner runs
        of spaces collapsed to a single space."""
        cleaned = []
        for ch in s.strip(' '):
            # Keep every non-space; keep a space only after a non-space char.
            if ch != ' ' or cleaned[-1] != ' ':
                cleaned.append(ch)
        return cleaned

    def reverse(self, chars: list, lo: int, hi: int) -> None:
        """Reverse chars[lo:hi + 1] in place."""
        while lo < hi:
            chars[lo], chars[hi] = chars[hi], chars[lo]
            lo += 1
            hi -= 1

    def reverse_each_word(self, chars: list) -> None:
        """Reverse every space-delimited word of chars in place."""
        total = len(chars)
        start = 0
        while start < total:
            end = start
            while end < total and chars[end] != ' ':
                end += 1
            self.reverse(chars, start, end - 1)
            start = end + 1
# T=O(n), S=O(n)
# T=O(n), S=O(n): collect words left-to-right, prepend each to a deque.
class Solution:
    def reverseWords(self, s: str) -> str:
        """Return s with its words in reverse order, single-space separated."""
        if not s: return ''
        lo, hi = 0, len(s) - 1
        # Strip leading and trailing spaces via the two pointers.
        while lo <= hi and s[lo] == ' ':
            lo += 1
        while lo <= hi and s[hi] == ' ':
            hi -= 1
        words = collections.deque()
        current = []
        while lo <= hi:
            ch = s[lo]
            if ch == ' ' and current:
                # Word boundary: flush the accumulated characters.
                words.appendleft(''.join(current))
                current = []
            elif ch != ' ':
                current.append(ch)
            lo += 1
        words.appendleft(''.join(current))
        return ' '.join(words)
|
"""
1. Clarification
2. Possible solutions
- Python built-in
- Hand-crafted
- Deque
3. Coding
4. Tests
"""
class Solution:
    def reverse_words(self, s: str) -> str:
        """Return s with its words in reverse order, single-space separated,
        outer spaces stripped."""
        if not s:
            return ''
        chars = self.trim_spaces(s)
        self.reverse(chars, 0, len(chars) - 1)
        self.reverse_each_word(chars)
        return ''.join(chars)

    def trim_spaces(self, s: str) -> list:
        """Return s as a char list with outer spaces stripped and inner runs
        of spaces collapsed to one."""
        cleaned = []
        for ch in s.strip(' '):
            # Keep non-spaces always; keep a space only after a non-space.
            if ch != ' ' or cleaned[-1] != ' ':
                cleaned.append(ch)
        return cleaned

    def reverse(self, chars: list, lo: int, hi: int) -> None:
        """Reverse chars[lo:hi + 1] in place."""
        while lo < hi:
            (chars[lo], chars[hi]) = (chars[hi], chars[lo])
            lo += 1
            hi -= 1

    def reverse_each_word(self, chars: list) -> None:
        """Reverse every space-delimited word of chars in place."""
        total = len(chars)
        start = 0
        while start < total:
            end = start
            while end < total and chars[end] != ' ':
                end += 1
            self.reverse(chars, start, end - 1)
            start = end + 1
class Solution:
    def reverse_words(self, s: str) -> str:
        """Return s with its words in reverse order, single-space separated
        (deque-based variant)."""
        if not s:
            return ''
        lo, hi = 0, len(s) - 1
        # Strip outer spaces with two pointers.
        while lo <= hi and s[lo] == ' ':
            lo += 1
        while lo <= hi and s[hi] == ' ':
            hi -= 1
        words = collections.deque()
        current = []
        while lo <= hi:
            ch = s[lo]
            if ch == ' ' and current:
                # Word boundary: flush the accumulated characters.
                words.appendleft(''.join(current))
                current = []
            elif ch != ' ':
                current.append(ch)
            lo += 1
        words.appendleft(''.join(current))
        return ' '.join(words)
|
# -*- coding: utf-8 -*-
"""
Created on Tue May 14 10:22:45 2019
@author: Lee
"""
__version_info__ = (0, 3, 10)
# BUG FIX: the original read "'.',join(map(...))" -- a comma instead of a dot,
# making `join` an undefined bare name (NameError); call str.join on '.'.
__version__ = '.'.join(map(str, __version_info__[:3]))
if len(__version_info__) == 4:
    # BUG FIX: was `__version_info[-1]` (missing trailing underscores).
    # Appends an optional fourth element (e.g. a pre-release tag).
    __version__ += __version_info__[-1]
|
"""
Created on Tue May 14 10:22:45 2019
@author: Lee
"""
__version_info__ = (0, 3, 10)
# BUG FIX: the original built a tuple ('.', join(...)) with `join` as an
# undefined bare name (NameError); use str.join on '.' instead.
__version__ = '.'.join(map(str, __version_info__[:3]))
if len(__version_info__) == 4:
    # BUG FIX: was `__version_info[-1]` (missing trailing underscores).
    __version__ += __version_info__[-1]
|
# class => module
MODULE_MAP = {
'ShuffleRerankPlugin': 'plugins.rerank.shuffle',
'PtTransformersRerankPlugin': 'plugins.rerank.transformers',
'PtDistilBertQAModelPlugin': 'plugins.qa.distilbert'
}
# image => directory
IMAGE_MAP = {
'alpine': '../Dockerfiles/alpine',
'pt': '../Dockerfiles/pt'
}
INDEXER_MAP = {
'ESIndexer': 'indexers.es'
}
|
# Plugin class name => dotted module path.
module_map = {'ShuffleRerankPlugin': 'plugins.rerank.shuffle', 'PtTransformersRerankPlugin': 'plugins.rerank.transformers', 'PtDistilBertQAModelPlugin': 'plugins.qa.distilbert'}
# Image name => Dockerfile directory.
image_map = {'alpine': '../Dockerfiles/alpine', 'pt': '../Dockerfiles/pt'}
# Indexer class name => dotted module path.
indexer_map = {'ESIndexer': 'indexers.es'}
|
'''
This file was used in an earlier version; geodata does not currently use
SQLAlchemy.
---
This file ensures all other files use the same SQLAlchemy session and Base.
Other files should import engine, session, and Base when needed. Use:
from initialize_sqlalchemy import Base, session, engine
'''
# # The engine
# from sqlalchemy import create_engine
# engine = create_engine('sqlite:///:memory:')
# # The session
# from sqlalchemy.orm import sessionmaker
# Session = sessionmaker(bind=engine)
# session = Session()
# # The Base
# from sqlalchemy.ext.declarative import declarative_base
# Base = declarative_base()
|
"""
This file was used in an earlier version; geodata does not currently use
SQLAlchemy.
---
This file ensures all other files use the same SQLAlchemy session and Base.
Other files should import engine, session, and Base when needed. Use:
from initialize_sqlalchemy import Base, session, engine
"""
|
# Copyright 2021 IBM All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants for unstructured text
The ACD and QuickUMLS NLP services' output for these text strings must
be included in resources/acd/TestReportResponses.json and
resources/quickUmls/TestReportResponses.json
"""
# Fixture text containing two conditions plus a symptom mention.
TEXT_FOR_MULTIPLE_CONDITIONS = (
    "Patient reports that they recently had pneumonia "
    + "and is now having chest pain. "
    + "The patient was diagnosed with a myocardial infarction."
)
# Fixture text containing a single medication mention.
TEXT_FOR_MEDICATION = "Patient is taking Cisplatin."
# Fixture text containing conditions and a prescribed medication.
TEXT_FOR_CONDITION_AND_MEDICATION = (
    ""
    + "Patient had pneumonia a month ago and has now been diagnosed with "
    + "a myocardial infarction. Prescribed Acebutolol."
)
# Fixture text with a suspected condition and a family-history mention.
TEXT_FOR_CONDITION_SUSPECTED_AND_FAM_HISTORY = (
    "suspect skin cancer because the patient's brother has skin cancer"
)
# Fixture text describing an adverse medication event.
TEXT_FOR_ADVERSE_EVENT = (
    "The patient's course was also complicated by mental status changes secondary "
    + "to a combination of his narcotics and Neurontin, which had been given for his "
    + "trigeminal neuralgia and chronic pain. The Neurontin was stopped and he "
    + "received hemodialysis on consecutive days."
)
# Important thing with this text is that there are multiple spans over the same
# condition and medication
# With the condition, "myocardial infarction" == "heart attack"
TEXT_FOR_MULTIPLE_ATTRIBUTES_SAME_RESOURCE = (
    "The patient had a myocardial infarction in 2015 and was prescribed Losartan. "
    + "His prescription was changed to Irbesartan in 2019. "
    + "He had a second heart attack in 2021, and is now taking Losartan again."
)
|
"""Constants for unstructured text
The ACD and QuickUMLS NLP services' output for these text strings must
be included in resources/acd/TestReportResponses.json and
resources/quickUmls/TestReportResponses.json
"""
# Fixture text containing two conditions plus a symptom mention.
text_for_multiple_conditions = 'Patient reports that they recently had pneumonia ' + 'and is now having chest pain. ' + 'The patient was diagnosed with a myocardial infarction.'
# Fixture text containing a single medication mention.
text_for_medication = 'Patient is taking Cisplatin.'
# Fixture text containing conditions and a prescribed medication.
text_for_condition_and_medication = '' + 'Patient had pneumonia a month ago and has now been diagnosed with ' + 'a myocardial infarction. Prescribed Acebutolol.'
# Fixture text with a suspected condition and a family-history mention.
text_for_condition_suspected_and_fam_history = "suspect skin cancer because the patient's brother has skin cancer"
# Fixture text describing an adverse medication event.
text_for_adverse_event = "The patient's course was also complicated by mental status changes secondary " + 'to a combination of his narcotics and Neurontin, which had been given for his ' + 'trigeminal neuralgia and chronic pain. The Neurontin was stopped and he ' + 'received hemodialysis on consecutive days.'
# This text deliberately contains multiple spans over the same condition and
# medication ("myocardial infarction" == "heart attack").
text_for_multiple_attributes_same_resource = 'The patient had a myocardial infarction in 2015 and was prescribed Losartan. ' + 'His prescription was changed to Irbesartan in 2019. ' + 'He had a second heart attack in 2021, and is now taking Losartan again.'
|
# The authors of this work have released all rights to it and placed it
# in the public domain under the Creative Commons CC0 1.0 waiver
# (http://creativecommons.org/publicdomain/zero/1.0/).
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Retrieved from: http://en.literateprograms.org/Generating_all_integer_lattice_points_(Python)?oldid=17065
# Modified for VLC project
'''Code for creating mappings that are used to place symbols in the matrix.'''
def _distance(p):
return p[0]**2 + p[1]**2
def _may_use(vu, shape):
'''Returns true if the entry v, u may be used in a conjugate symmetric matrix.
In other words, the function returns false for v, u pairs that must be
their own conjugate. These pairs cannot be used to store complex symbols.
`shape` is the shape of the matrix into which the symbols are packed.
'''
n, m = shape
m = (m/2) + 1
v, u = vu
if v < 0:
v = n+v
# May not use DC component
if u == 0 and v == 0:
return False
# May not use lower half of 0 colument
max_v_u0 = n/2 if n % 2 == 1 else n/2-1
if u == 0 and v > max_v_u0:
return False
# Perform some additional bounds checking here.
# Raise an exception if the check fails, as it is
# a programming error.
max_u = m-1 if shape[1] % 2 == 1 else m-2
if u > max_u:
raise IndexError('Mapping tries to set illegal entry. '
'(Are you trying to pack too many symbols?)')
return True
def halfring_generator(shape, limit=None):
    '''Generates a sequence of (v,u) tuples that describe a halfring.

    Points are produced ring by ring for increasing radius d; within a ring
    they are sorted by squared distance from the origin, and only entries
    accepted by _may_use (usable in a conjugate-symmetric matrix of `shape`)
    are yielded.  `limit`, if given, caps the ring radius d.
    '''
    # TODO Bounds checking for the shape
    ymax = [0]  # ymax[x] = next candidate y value to try in column x
    d = 0
    while limit is None or d <= limit:
        yieldable = []
        while 1:
            batch = []
            for x in range(d+1):
                y = ymax[x]
                if _distance((x, y)) <= d**2: # Note: distance squared
                    batch.append((y, x))
                    if y != 0:
                        # Mirror point below the axis (y == 0 has no mirror).
                        batch.append((-y, x))
                    ymax[x] += 1
            if not batch:
                break
            yieldable += batch
        yieldable.sort(key=_distance)
        for p in yieldable:
            if _may_use(p, shape):
                yield p
        d += 1
        ymax.append(0) # Extend to make room for column[d]
def halfring(n, shape):
    '''Returns a list of the first `n` (v,u) tuples of the halfring for `shape`.'''
    g = halfring_generator(shape)
    # ``range`` rather than ``xrange``: the original mixed Python 2's
    # ``xrange`` with Python 3's ``next`` builtin, so it raised NameError
    # on any Python 3 interpreter.
    return [next(g) for _ in range(n)]
|
"""Code for creating mappings that are used to place symbols in the matrix."""
def _distance(p):
return p[0] ** 2 + p[1] ** 2
def _may_use(vu, shape):
"""Returns true if the entry v, u may be used in a conjugate symmetric matrix.
In other words, the function returns false for v, u pairs that must be
their own conjugate. These pairs cannot be used to store complex symbols.
`shape` is the shape of the matrix into which the symbols are packed.
"""
(n, m) = shape
m = m / 2 + 1
(v, u) = vu
if v < 0:
v = n + v
if u == 0 and v == 0:
return False
max_v_u0 = n / 2 if n % 2 == 1 else n / 2 - 1
if u == 0 and v > max_v_u0:
return False
max_u = m - 1 if shape[1] % 2 == 1 else m - 2
if u > max_u:
raise index_error('Mapping tries to set illegal entry. (Are you trying to pack too many symbols?)')
return True
def halfring_generator(shape, limit=None):
    """Generates a sequence of (v,u) tuples that describe a halfring.

    Points are yielded in order of increasing distance from the origin,
    keeping only entries `_may_use` accepts for a conjugate symmetric
    matrix of `shape`.  If `limit` is given, generation stops once the
    ring radius `d` exceeds it.
    """
    # ymax[x] tracks the next unvisited row in column x.
    ymax = [0]
    d = 0
    while limit is None or d <= limit:
        yieldable = []
        while 1:
            # Gather all not-yet-visited points within radius d.
            batch = []
            for x in range(d + 1):
                y = ymax[x]
                if _distance((x, y)) <= d ** 2:  # compared against distance squared
                    batch.append((y, x))
                    if y != 0:
                        # Mirror point below the axis.
                        batch.append((-y, x))
                    ymax[x] += 1
            if not batch:
                break
            yieldable += batch
        # Emit nearest-first within the ring.
        yieldable.sort(key=_distance)
        for p in yieldable:
            if _may_use(p, shape):
                yield p
        d += 1
        # Extend to make room for column[d].
        ymax.append(0)
def halfring(n, shape):
    """Returns a list of the first `n` (v,u) tuples of the halfring for `shape`."""
    g = halfring_generator(shape)
    # ``range`` rather than ``xrange``: mixing Python 2 ``xrange`` with the
    # Python 3 ``next`` builtin raises NameError under Python 3.
    return [next(g) for _ in range(n)]
|
class EntityForm(django.forms.ModelForm):
    """ModelForm for ``Entity`` that hides the homepage and image fields."""

    class Meta:
        model = Entity
        # Auto-generate form fields for every model field except these two.
        exclude = (u'homepage', u'image')
|
class Entityform(django.forms.ModelForm):
    """ModelForm for ``Entity`` that hides the homepage and image fields.

    NOTE(review): the class name breaks PEP 8 CapWords (``EntityForm``);
    left unchanged because renaming would break existing importers.
    """

    class Meta:
        model = Entity
        # Auto-generate form fields for every model field except these two.
        exclude = (u'homepage', u'image')
|
'''
Simpler to just do the math than iterating. Break each number from 1 to 20 into its prime components.
The find the maximum times each unique prime occurs in any of the numbers.
Include the prime that number of times.
For instance, 9 is ( 3 * 3 ) and 18 is ( 3 * 3 * 2 ), both of which are the most
number of times 3 shows up.
So include 3 twice. Cheaper to do so using 9 since we already get a 2 from other numbers.
This works since primes can be recombined to form any of the numbers, but you need
to have enough of the primes to actually do so. You can use 5 and 2 to make 10, but
you need to include another 2 to make 20, for instance.
'''
# 2^4 * 3^2 * 5 * 7 * 11 * 13 * 17 * 19 == 232792560
print(2 ** 4 * 3 ** 2 * 5 * 7 * 11 * 13 * 17 * 19)
|
"""
Simpler to just do the math than iterating. Break each number from 1 to 20 into its prime components.
The find the maximum times each unique prime occurs in any of the numbers.
Include the prime that number of times.
For instance, 9 is ( 3 * 3 ) and 18 is ( 3 * 3 * 2 ), both of which are the most
number of times 3 shows up.
So include 3 twice. Cheaper to do so using 9 since we already get a 2 from other numbers.
This works since primes can be recombined to form any of the numbers, but you need
to have enough of the primes to actually do so. You can use 5 and 2 to make 10, but
you need to include another 2 to make 20, for instance.
"""
print(16 * 17 * 5 * 13 * 9 * 11 * 19 * 7)
|
"""
Mixin for returning the intersected frames.
"""
class IntersectedFramesMixin:
    """Mixin that tracks the frames found to intersect.

    Exposes an ``intersected_frames`` list, empty on construction.
    """

    def __init__(self):
        # Per-instance accumulator; a fresh list for every object.
        self.intersected_frames = []
|
"""
Mixin for returning the intersected frames.
"""
class Intersectedframesmixin:
    """Mixin exposing a list of intersected frames.

    NOTE(review): name breaks PEP 8 CapWords (``IntersectedFramesMixin``);
    kept as-is so existing subclasses keep working.
    """

    def __init__(self):
        # Fresh, per-instance list of intersecting frames.
        self.intersected_frames = []
|
# n = length of the array, k = number of lookups to perform.
n, k = map(int, input().split())
# The array to search in — assumed sorted ascending; the binary search
# below relies on it (TODO confirm with the problem statement).
arr = list(map(int, input().split()))
# The values to look up.
arr_find = list(map(int, input().split()))
def bin_search(arr, el):
    """Return ``(lo, hi)`` bracketing *el* in the sorted list *arr*.

    ``hi`` is the first index with ``arr[hi] >= el`` (``len(arr)`` if none);
    ``lo`` is the last index with ``arr[lo] < el`` (``-1`` if none).
    """
    lo, hi = -1, len(arr)
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if arr[mid] < el:
            lo = mid
        else:
            hi = mid
    return lo, hi
# For each query, print the bracketing indices and whether el is present.
for el in arr_find:
    left, right = bin_search(arr, el)
    print(left, right)
    # Membership fix: ``right`` is the first position with arr[right] >= el,
    # so it holds el exactly when el is present.  The original tested
    # ``arr[left]``, which by the search invariant is always < el (and
    # ``arr[-1]`` wraps to the last element when left == -1), so present
    # elements were reported as missing.
    if right < len(arr) and arr[right] == el:
        print('YES')
    else:
        print('NO')
|
# n = length of the array, k = number of lookups to perform.
(n, k) = map(int, input().split())
# The array to search in — assumed sorted ascending; the binary search
# below relies on it (TODO confirm with the problem statement).
arr = list(map(int, input().split()))
# The values to look up.
arr_find = list(map(int, input().split()))
def bin_search(arr, el):
    """Bracket *el* in the sorted list *arr*.

    Returns ``(left, right)`` where ``right`` is the first index with
    ``arr[right] >= el`` and ``left`` is the last index with
    ``arr[left] < el`` (``-1``/``len(arr)`` at the extremes).
    """
    left = -1
    right = len(arr)
    while right - left > 1:
        mid = (left + right) >> 1
        if el <= arr[mid]:
            right = mid
        else:
            left = mid
    return (left, right)
# For each query, print the bracketing indices and whether el is present.
for el in arr_find:
    (left, right) = bin_search(arr, el)
    print(left, right)
    # Membership fix: ``right`` is the first position with arr[right] >= el,
    # so it holds el exactly when el is present.  The original tested
    # ``arr[left]``, which by the search invariant is always < el (and
    # ``arr[-1]`` wraps to the last element when left == -1), so present
    # elements were reported as missing.
    if right < len(arr) and arr[right] == el:
        print('YES')
    else:
        print('NO')
|
def default_getter(attribute=None):
    """a default method for missing renderer method

    for example, the support to write data in a specific file type
    is missing but the support to read data exists
    """
    message = "%s getter is not defined." % attribute

    def none_presenter(_, **__):
        raise NotImplementedError(message)

    # Expose the same message through the placeholder's docstring.
    none_presenter.__doc__ = message
    return none_presenter
def default_setter(attribute=None):
    """a default method for missing parser method

    for example, the support to read data in a specific file type
    is missing but the support to write data exists
    """
    message = "%s setter is not defined." % attribute

    def none_importer(_x, _y, **_z):
        raise NotImplementedError(message)

    # Expose the same message through the placeholder's docstring.
    none_importer.__doc__ = message
    return none_importer
def make_a_property(
    cls,
    attribute,
    doc_string,
    getter_func=default_getter,
    setter_func=default_setter,
):
    """Attach ``attribute`` to *cls* as a property plus get_/set_ methods.

    ``getter_func``/``setter_func`` are factories receiving the attribute
    name and returning the accessor callables (the defaults produce
    NotImplementedError-raising placeholders).  Dots in ``attribute`` are
    replaced with underscores to form valid identifiers.
    """
    getter = getter_func(attribute)
    setter = setter_func(attribute)
    attribute_property = property(
        # note:
        # without fget, fset, pypy 5.4.0 crashes randomly.
        fget=getter,
        fset=setter,
        doc=doc_string,
    )
    # str.replace is a no-op when no dot is present, so the original
    # ``if "." in attribute`` guard (whose else branch was the no-op
    # ``attribute = attribute``) is redundant.
    attribute = attribute.replace(".", "_")
    setattr(cls, attribute, attribute_property)
    setattr(cls, "get_%s" % attribute, getter)
    setattr(cls, "set_%s" % attribute, setter)
|
def default_getter(attribute=None):
    """a default method for missing renderer method

    for example, the support to write data in a specific file type
    is missing but the support to read data exists
    """

    def none_presenter(_, **__):
        """docstring is assigned a few lines down the line"""
        # Bug fix: the original raised the undefined name
        # ``not_implemented_error``, so callers got a NameError instead of
        # the intended NotImplementedError.
        raise NotImplementedError('%s getter is not defined.' % attribute)

    none_presenter.__doc__ = '%s getter is not defined.' % attribute
    return none_presenter
def default_setter(attribute=None):
    """a default method for missing parser method

    for example, the support to read data in a specific file type
    is missing but the support to write data exists
    """

    def none_importer(_x, _y, **_z):
        """docstring is assigned a few lines down the line"""
        # Bug fix: the original raised the undefined name
        # ``not_implemented_error``, so callers got a NameError instead of
        # the intended NotImplementedError.
        raise NotImplementedError('%s setter is not defined.' % attribute)

    none_importer.__doc__ = '%s setter is not defined.' % attribute
    return none_importer
def make_a_property(cls, attribute, doc_string, getter_func=default_getter, setter_func=default_setter):
    """Attach ``attribute`` to *cls* as a property plus get_/set_ methods.

    ``getter_func``/``setter_func`` are factories receiving the attribute
    name and returning the accessor callables.  Dots in ``attribute`` are
    replaced with underscores to form valid identifiers.
    """
    getter = getter_func(attribute)
    setter = setter_func(attribute)
    attribute_property = property(fget=getter, fset=setter, doc=doc_string)
    # str.replace is a no-op when no dot is present, so the original
    # ``if '.' in attribute`` guard (whose else branch was the no-op
    # ``attribute = attribute``) is redundant.
    attribute = attribute.replace('.', '_')
    setattr(cls, attribute, attribute_property)
    setattr(cls, 'get_%s' % attribute, getter)
    setattr(cls, 'set_%s' % attribute, setter)
|
"""Calculate Net:Gross estimates"""
def calculate_deep_net_gross_model(model, composition):
    """Calculate a net gross estimate based on the given deep composition

    Adds three columns to a copy of ``model``: ``bb_pct`` (weight of each
    row's building block type from ``composition``), ``cls_ratio``
    (accumulated class-filter weighting, starting at 1.0) and ``result``
    (row-wise product of ``net_gross``, ``bb_pct`` and ``cls_ratio``).
    Assumes ``model`` is a DataFrame with ``building_block_type`` and
    ``net_gross`` columns plus one column per filter class — TODO confirm
    against the caller.
    """
    net_gross = model.assign(
        bb_pct=lambda df: df.apply(
            lambda row: composition["building_block_type"][row.building_block_type],
            axis="columns",
        ),
        cls_ratio=1.0,
    )
    for building_block_type in ("Channel Fill", "Lobe"):
        for filter_class, weights in composition.get(building_block_type, {}).items():
            # An "Ignore ..." flag means: spread the weight evenly over the
            # distinct (truthy) values instead of using explicit weights.
            ignores = [v for k, v in weights.items() if k.startswith("Ignore ")]
            if ignores and ignores[0]:
                idx = net_gross.query(
                    "building_block_type == @building_block_type"
                ).index
                num_values = len(
                    [v for v in net_gross.loc[idx, filter_class].unique() if v]
                )
                net_gross.loc[idx, "cls_ratio"] /= num_values
            else:
                # Scale each matching row by its value's percentage weight.
                for value in (
                    net_gross.query("building_block_type == @building_block_type")
                    .loc[:, filter_class]
                    .unique()
                ):
                    idx = net_gross.query(
                        "building_block_type == @building_block_type and "
                        f"{filter_class} == @value"
                    ).index
                    net_gross.loc[idx, "cls_ratio"] *= weights.get(value, 0) / 100
    return net_gross.assign(
        result=lambda df: df.loc[:, ["net_gross", "bb_pct", "cls_ratio"]].prod(
            axis="columns"
        )
    )
def calculate_deep_net_gross(model, composition):
    """Calculate one net gross number"""
    detailed = calculate_deep_net_gross_model(model=model, composition=composition)
    return detailed.loc[:, "result"].sum()
def calculate_shallow_net_gross_model(model, composition):
    """Calculate a net gross estimate based on the given shallow composition"""

    def _block_weight(row):
        # A building block contributes only when its configured quality
        # matches the row's descriptive quality; otherwise its weight is 0.
        expected_quality = composition.get(f"{row.building_block_type} Quality")
        if expected_quality == row.descriptive_reservoir_quality:
            return composition.get(row.building_block_type, 0)
        return 0

    net_gross = model.assign(
        bb_pct=lambda df: df.apply(_block_weight, axis="columns")
    )
    return net_gross.assign(
        result=lambda df: df.loc[:, ["net_gross", "bb_pct"]].prod(axis="columns")
    )
def calculate_shallow_net_gross(model, composition):
    """Calculate one net gross number"""
    detailed = calculate_shallow_net_gross_model(model=model, composition=composition)
    return detailed.loc[:, "result"].sum()
|
"""Calculate Net:Gross estimates"""
def calculate_deep_net_gross_model(model, composition):
    """Calculate a net gross estimate based on the given deep composition

    Adds ``bb_pct`` (building-block weight), ``cls_ratio`` (accumulated
    class-filter weighting, starting at 1.0) and ``result`` (row-wise
    product of ``net_gross``, ``bb_pct`` and ``cls_ratio``) to a copy of
    ``model``.  Assumes ``model`` has ``building_block_type`` and
    ``net_gross`` columns plus one column per filter class — TODO confirm
    against the caller.
    """
    net_gross = model.assign(bb_pct=lambda df: df.apply(lambda row: composition['building_block_type'][row.building_block_type], axis='columns'), cls_ratio=1.0)
    for building_block_type in ('Channel Fill', 'Lobe'):
        for (filter_class, weights) in composition.get(building_block_type, {}).items():
            # An "Ignore ..." flag means: spread the weight evenly over the
            # distinct (truthy) values instead of using explicit weights.
            ignores = [v for (k, v) in weights.items() if k.startswith('Ignore ')]
            if ignores and ignores[0]:
                idx = net_gross.query('building_block_type == @building_block_type').index
                num_values = len([v for v in net_gross.loc[idx, filter_class].unique() if v])
                net_gross.loc[idx, 'cls_ratio'] /= num_values
            else:
                # Scale each matching row by its value's percentage weight.
                for value in net_gross.query('building_block_type == @building_block_type').loc[:, filter_class].unique():
                    idx = net_gross.query(f'building_block_type == @building_block_type and {filter_class} == @value').index
                    net_gross.loc[idx, 'cls_ratio'] *= weights.get(value, 0) / 100
    return net_gross.assign(result=lambda df: df.loc[:, ['net_gross', 'bb_pct', 'cls_ratio']].prod(axis='columns'))
def calculate_deep_net_gross(model, composition):
    """Calculate one net gross number"""
    scored = calculate_deep_net_gross_model(model=model, composition=composition)
    return scored.loc[:, 'result'].sum()
def calculate_shallow_net_gross_model(model, composition):
    """Calculate a net gross estimate based on the given shallow composition"""

    def _pct(row):
        # Contribute the block's weight only when the configured quality
        # matches this row's descriptive quality.
        quality_key = f'{row.building_block_type} Quality'
        if composition.get(quality_key) == row.descriptive_reservoir_quality:
            return composition.get(row.building_block_type, 0)
        return 0

    with_pct = model.assign(bb_pct=lambda df: df.apply(_pct, axis='columns'))
    return with_pct.assign(result=lambda df: df.loc[:, ['net_gross', 'bb_pct']].prod(axis='columns'))
def calculate_shallow_net_gross(model, composition):
    """Calculate one net gross number"""
    scored = calculate_shallow_net_gross_model(model=model, composition=composition)
    return scored.loc[:, 'result'].sum()
|
# Print the numbers 1..n in a triangle: row i holds up to i numbers,
# stopping (possibly mid-row) once n has been printed.
n = int(input())
current = 1  # next number to print
bigger = False  # set once current exceeds n, to break the outer loop too
for i in range(1, n + 1):
    for j in range(1, i + 1):
        if current > n:
            bigger = True
            break
        print(str(current) + " ", end="")
        current += 1
    if bigger:
        break
print()
|
# Print the numbers 1..n in a triangle: row i holds up to i numbers,
# stopping (possibly mid-row) once n has been printed.
n = int(input())
current = 1  # next number to print
bigger = False  # set once current exceeds n, to break the outer loop too
for i in range(1, n + 1):
    for j in range(1, i + 1):
        if current > n:
            bigger = True
            break
        print(str(current) + ' ', end='')
        current += 1
    if bigger:
        break
print()
|
# Weighted mean: line 1 holds the values, line 2 the weights
# (n read first — presumably the count of entries; unused below).
n = int(input())
d = list()
for i in range(2):
    k=list(map(int,input().split()))
    d.append(k)
# Pair each value with its weight.
c = list(zip(*d))
p = []
for i in range(len(c)):
    p.append(c[i][0] * c[i][1])
# Weighted mean = sum(value * weight) / sum(weights).
number=sum(p)/sum(d[1])
print("{:.1f}".format(number))
|
# Weighted mean: line 1 holds the values, line 2 the weights
# (n read first — presumably the count of entries; unused below).
n = int(input())
d = list()
for i in range(2):
    k = list(map(int, input().split()))
    d.append(k)
# Pair each value with its weight.
c = list(zip(*d))
p = []
for i in range(len(c)):
    p.append(c[i][0] * c[i][1])
# Weighted mean = sum(value * weight) / sum(weights).
number = sum(p) / sum(d[1])
print('{:.1f}'.format(number))
|
queries = {
"column": {
"head": "select top %d %s from %s;",
"all": "select %s from %s;",
"unique": "select distinct %s from %s;",
"sample": "select top %d %s from %s order by rand();"
},
"table": {
"select": "select %s from %s;",
"head": "select top %d * from %s;",
"all": "select * from %s;",
"unique": "select distinct %s from %s;",
"sample": "select top %d * from %s order by rand();"
},
"system": {
"schema_no_system": """
select
table_name
, column_name
, data_type
from
information_schema.columns
where
table_schema not in ('information_schema', 'sys')
""",
"schema_with_system": """
select
table_name
, column_name
, data_type
from
information_schema.columns;
""",
"schema_specified": """
select
table_name
, column_name
, data_type
from
information_schema.columns
where table_schema in (%s);
""",
"foreign_keys_for_table": """
SELECT
object_name(constraint_object_id) AS foreign_key,
object_name(referenced_object_id) AS referenced_table,
col.name AS referenced_column
FROM sys.foreign_key_columns
INNER JOIN sys.columns col
ON col.column_id = referenced_column_id
AND col.object_id = referenced_object_id
WHERE parent_object_id = object_id('%s');
""",
"foreign_keys_for_column": """
SELECT
object_name(constraint_object_id) AS foreign_key,
object_name(referenced_object_id) AS referenced_table,
col.name AS referenced_column
FROM sys.foreign_key_columns
INNER JOIN sys.columns col
ON col.column_id = referenced_column_id
AND col.object_id = referenced_object_id
WHERE parent_object_id = object_id('%s')
AND constraint_object_id = object_id('%s');
""",
"ref_keys_for_table": """
SELECT
dc.Name AS constraint_column,
t.Name AS referenced_table,
c.Name AS referenced_column
FROM sys.tables t
INNER JOIN sys.default_constraints dc
ON t.object_id = dc.parent_object_id
INNER JOIN sys.columns c
ON dc.parent_object_id = c.object_id
AND c.column_id = dc.parent_column_id
WHERE t.name='%s';
"""
}
}
|
queries = {'column': {'head': 'select top %d %s from %s;', 'all': 'select %s from %s;', 'unique': 'select distinct %s from %s;', 'sample': 'select top %d %s from %s order by rand();'}, 'table': {'select': 'select %s from %s;', 'head': 'select top %d * from %s;', 'all': 'select * from %s;', 'unique': 'select distinct %s from %s;', 'sample': 'select top %d * from %s order by rand();'}, 'system': {'schema_no_system': "\n select\n table_name\n , column_name\n , data_type\n from\n information_schema.columns\n where\n table_schema not in ('information_schema', 'sys')\n ", 'schema_with_system': '\n select\n table_name\n , column_name\n , data_type\n from\n information_schema.columns;\n ', 'schema_specified': '\n select\n table_name\n , column_name\n , data_type\n from\n information_schema.columns\n where table_schema in (%s);\n ', 'foreign_keys_for_table': "\n SELECT\n object_name(constraint_object_id) AS foreign_key,\n object_name(referenced_object_id) AS referenced_table,\n col.name AS referenced_column\n FROM sys.foreign_key_columns\n INNER JOIN sys.columns col\n ON col.column_id = referenced_column_id\n AND col.object_id = referenced_object_id\n WHERE parent_object_id = object_id('%s');\n ", 'foreign_keys_for_column': "\n SELECT\n object_name(constraint_object_id) AS foreign_key,\n object_name(referenced_object_id) AS referenced_table,\n col.name AS referenced_column\n FROM sys.foreign_key_columns\n INNER JOIN sys.columns col\n ON col.column_id = referenced_column_id\n AND col.object_id = referenced_object_id\n WHERE parent_object_id = object_id('%s')\n AND constraint_object_id = object_id('%s');\n ", 'ref_keys_for_table': "\n SELECT\n dc.Name AS constraint_column,\n t.Name AS referenced_table,\n c.Name AS referenced_column\n FROM sys.tables t\n INNER JOIN sys.default_constraints dc\n ON t.object_id = dc.parent_object_id\n INNER JOIN sys.columns c\n ON dc.parent_object_id = c.object_id\n AND c.column_id = dc.parent_column_id\n WHERE t.name='%s';\n "}}
|
def reader():
    """Return the puzzle input as a list of lines (newlines stripped)."""
    with open('day3/puzzle_input.txt', 'r') as f:
        return f.read().splitlines()
def tree(right, down):
    """Count '#' cells hit while stepping `right`/`down` through the grid.

    The grid wraps horizontally: when the slope runs past the end of a
    row, the column index wraps around to the start (toboggan puzzle).
    """
    count = 0
    index = 0  # current column
    lines = reader()
    for i in range(0, len(lines), down):
        line = lines[i]
        if line[index] == '#':
            count += 1
        # Columns left in this row after `index`; wrap when fewer than
        # `right` remain (equivalent to (index + right) % len(line)).
        remainder = len(line) - index - 1
        index = (
            index + right if remainder > right - 1 else right - 1 - remainder
        )
    return count
# Part 2: multiply the tree counts over the five given slopes.
multiply = 1
for right, down in ((1, 1), (3, 1), (5, 1), (7, 1), (1, 2)):
    multiply *= tree(right, down)
print(multiply)
|
def reader():
    """Return the puzzle input as a list of lines (newlines stripped)."""
    with open('day3/puzzle_input.txt', 'r') as f:
        return f.read().splitlines()
def tree(right, down):
    """Count '#' cells hit while stepping `right`/`down` through the grid.

    The grid wraps horizontally: when the slope runs past the end of a
    row, the column index wraps around to the start (toboggan puzzle).
    """
    count = 0
    index = 0  # current column
    lines = reader()
    for i in range(0, len(lines), down):
        line = lines[i]
        if line[index] == '#':
            count += 1
        # Columns left in this row after `index`; wrap when fewer than
        # `right` remain (equivalent to (index + right) % len(line)).
        remainder = len(line) - index - 1
        index = index + right if remainder > right - 1 else right - 1 - remainder
    return count
# Part 2: multiply the tree counts over the five given slopes.
multiply = 1
for (right, down) in ((1, 1), (3, 1), (5, 1), (7, 1), (1, 2)):
    multiply *= tree(right, down)
print(multiply)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.