seq_id (string, 7–11 chars) | text (string, 156–1.7M chars) | repo_name (string, 7–125 chars) | sub_path (string, 4–132 chars) | file_name (string, 4–77 chars) | file_ext (string, 6 classes) | file_size_in_byte (int64, 156–1.7M) | program_lang (string, 1 class) | lang (string, 38 classes) | doc_type (string, 1 class) | stars (int64, 0–24.2k, nullable) | dataset (string, 1 class) | pt (string, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---|---
43574733165 |
import os
import unittest
import re
from unittest import mock
import tempfile
# Patch dlopen to allow the tests to run when there is no build
with mock.patch('cffi.FFI.dlopen', return_value=mock.MagicMock()):
    from geopmdpy.system_files import ActiveSessions, AccessLists, WriteLock
with mock.patch('cffi.FFI.dlopen', return_value=mock.MagicMock()):
from geopmdpy.service import PlatformService
from geopmdpy.service import TopoService
class TestPlatformService(unittest.TestCase):
def setUp(self):
self._test_name = 'TestPlatformService'
self._RUN_PATH = tempfile.TemporaryDirectory('{}_run'.format(self._test_name))
self._mock_active_sessions = mock.create_autospec(ActiveSessions)
self._mock_active_sessions.get_clients.return_value = []
self._check_client_active_err_msg = "Injected error"
self._mock_active_sessions.check_client_active.side_effect = \
RuntimeError(self._check_client_active_err_msg) # Until open_mock_session is called
self._mock_access_lists = mock.create_autospec(AccessLists)
self._mock_write_lock = mock.create_autospec(WriteLock)
self._mock_write_lock.try_lock.return_value = None
self._mock_write_lock.unlock.return_value = None
with mock.patch('geopmdpy.system_files.ActiveSessions', return_value=self._mock_active_sessions), \
mock.patch('geopmdpy.system_files.AccessLists', return_value=self._mock_access_lists), \
mock.patch('geopmdpy.system_files.WriteLock', return_value=self._mock_write_lock):
self._platform_service = PlatformService()
self._platform_service._RUN_PATH = self._RUN_PATH.name
self._platform_service._active_sessions._RUN_PATH = self._RUN_PATH.name
self._session_file_format = os.path.join(self._RUN_PATH.name, 'session-{client_pid}.json')
def tearDown(self):
self._RUN_PATH.cleanup()
def test_close_already_closed(self):
        # Two independent components already hold a reference to the session.
client_pid = -999
self.open_mock_session('user_name', client_pid, True, 2) # 2
self._platform_service.close_session(client_pid) # 1
self._platform_service.close_session(client_pid) # 0
self._platform_service._active_sessions.check_client_active = mock.MagicMock(side_effect=RuntimeError)
with self.assertRaises(RuntimeError):
self._platform_service.close_session(client_pid) # error here
def test_read_already_closed(self):
        # Two independent components already hold a reference to the session.
client_pid = -999
self.open_mock_session('user_name', client_pid, True, 2) # 2
self._platform_service.close_session(client_pid) # 1
self._platform_service.close_session(client_pid) # 0
self._platform_service._active_sessions.check_client_active = mock.MagicMock(side_effect=RuntimeError)
with self.assertRaises(RuntimeError):
self._platform_service.read_signal(client_pid, 'CPU_FREQUENCY', 0, 0) # error here
def test_get_signal_info(self):
signals = ['energy', 'frequency', 'power']
descriptions = ['desc0', 'desc1', 'desc2']
domains = [0, 1, 2]
infos = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
expected_result = list(zip(signals, descriptions, domains))
for idx in range(len(expected_result)):
expected_result[idx] = expected_result[idx] + infos[idx]
with mock.patch('geopmdpy.pio.signal_description', side_effect=descriptions) as mock_desc, \
mock.patch('geopmdpy.pio.signal_domain_type', side_effect=domains) as mock_dom, \
mock.patch('geopmdpy.pio.signal_info', side_effect=infos) as mock_inf:
signal_info = self._platform_service.get_signal_info(signals)
self.assertEqual(expected_result, signal_info)
calls = [mock.call(cc) for cc in signals]
mock_desc.assert_has_calls(calls)
mock_dom.assert_has_calls(calls)
mock_inf.assert_has_calls(calls)
def test_get_control_info(self):
controls = ['fan', 'frequency', 'power']
descriptions = ['desc0', 'desc1', 'desc2']
domains = [0, 1, 2]
expected_result = list(zip(controls, descriptions, domains))
with mock.patch('geopmdpy.pio.control_description', side_effect=descriptions) as mock_desc, \
mock.patch('geopmdpy.pio.control_domain_type', side_effect=domains) as mock_dom:
control_info = self._platform_service.get_control_info(controls)
self.assertEqual(expected_result, control_info)
calls = [mock.call(cc) for cc in controls]
mock_desc.assert_has_calls(calls)
mock_dom.assert_has_calls(calls)
def test_lock_control(self):
err_msg = 'PlatformService: Implementation incomplete'
with self.assertRaisesRegex(NotImplementedError, err_msg):
self._platform_service.lock_control()
def test_unlock_control(self):
err_msg = 'PlatformService: Implementation incomplete'
with self.assertRaisesRegex(NotImplementedError, err_msg):
self._platform_service.unlock_control()
def test_open_session_twice(self):
self.open_mock_session('', active=True)
def _gen_session_data_helper(self, client_pid, reference_count):
signals_default = ['energy', 'frequency']
controls_default = ['controls', 'geopm', 'named', 'power']
watch_id = 888
session_data = {'client_pid': client_pid,
'reference_count': reference_count,
'mode': 'r',
'signals': signals_default,
'controls': controls_default,
'watch_id': watch_id}
return session_data
def open_mock_session(self, session_user, client_pid=-999, active=False, reference_count=1):
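        # Wire the autospec'd ActiveSessions/AccessLists mocks to describe the
        # session in session_data, then open the session and verify the calls.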
session_data = self._gen_session_data_helper(client_pid, reference_count)
client_pid = session_data['client_pid']
reference_count = session_data['reference_count']
watch_id = session_data['watch_id']
signals = session_data['signals']
controls = session_data['controls']
self._mock_active_sessions.is_client_active.return_value = active
self._mock_active_sessions.get_controls.return_value = controls
self._mock_active_sessions.get_signals.return_value = signals
self._mock_active_sessions.get_reference_count.return_value = reference_count
self._mock_active_sessions.get_watch_id.return_value = watch_id
self._mock_active_sessions.get_batch_server.return_value = None
self._mock_active_sessions.remove_client.return_value = session_data
self._mock_access_lists.get_user_access.return_value = (signals, controls)
with mock.patch('geopmdpy.system_files.AccessLists._get_user_groups', return_value=[]), \
mock.patch('geopmdpy.service.PlatformService._watch_client', return_value=watch_id):
self._platform_service.open_session(session_user, client_pid)
self._mock_active_sessions.is_client_active.assert_called_with(client_pid)
if not active:
self._mock_active_sessions.add_client.assert_called_with(client_pid, signals, controls, watch_id)
self._mock_access_lists.get_user_access.assert_called()
else:
self._mock_active_sessions.add_client.assert_not_called()
self._mock_active_sessions.check_client_active.side_effect = None # session is now active
return session_data
def test_open_session(self):
self.open_mock_session('')
def test_close_session_invalid(self):
client_pid = 999
with self.assertRaisesRegex(RuntimeError, self._check_client_active_err_msg):
self._platform_service.close_session(client_pid)
def test_close_session_read(self):
session_data = self.open_mock_session('')
client_pid = session_data['client_pid']
watch_id = session_data['watch_id']
with mock.patch('gi.repository.GLib.source_remove', return_value=[]) as mock_source_remove, \
mock.patch('geopmdpy.pio.restore_control_dir') as mock_restore_control_dir, \
mock.patch('shutil.rmtree', return_value=[]) as mock_rmtree:
self._platform_service.close_session(client_pid)
mock_restore_control_dir.assert_not_called()
mock_rmtree.assert_not_called()
mock_source_remove.assert_called_once_with(watch_id)
self._mock_active_sessions.remove_client.assert_called_once_with(client_pid)
def test_close_session_write(self):
session_data = self.open_mock_session('')
client_pid = session_data['client_pid']
watch_id = session_data['watch_id']
self._mock_write_lock.try_lock.return_value = client_pid
with mock.patch('geopmdpy.pio.save_control_dir') as mock_save_control_dir, \
mock.patch('geopmdpy.pio.write_control') as mock_write_control, \
mock.patch('os.getsid', return_value=client_pid) as mock_getsid:
self._platform_service.write_control(client_pid, 'geopm', 'board', 0, 42.024)
mock_save_control_dir.assert_called_once()
mock_write_control.assert_called_once_with('geopm', 'board', 0, 42.024)
with mock.patch('gi.repository.GLib.source_remove', return_value=[]) as mock_source_remove, \
mock.patch('geopmdpy.pio.restore_control_dir', return_value=[]) as mock_restore_control_dir, \
mock.patch('os.getsid', return_value=client_pid) as mock_getsid:
self._platform_service.close_session(client_pid)
mock_restore_control_dir.assert_called_once()
save_dir = os.path.join(self._platform_service._RUN_PATH,
self._platform_service._SAVE_DIR)
mock_source_remove.assert_called_once_with(watch_id)
self.assertFalse(self._platform_service._active_sessions.is_client_active(client_pid))
session_file = self._session_file_format.format(client_pid=client_pid)
self.assertFalse(os.path.exists(session_file))
def test_start_batch_invalid(self):
session_data = self.open_mock_session('')
client_pid = session_data['client_pid']
watch_id = session_data['watch_id']
valid_signals = session_data['signals']
signal_config = [(0, 0, sig) for sig in valid_signals]
valid_controls = session_data['controls']
bogus_controls = [(0, 0, 'invalid_frequency'), (0, 0, 'invalid_energy')]
control_config = [(0, 0, con) for con in valid_controls]
control_config.extend(bogus_controls)
err_msg = re.escape('Requested controls that are not in allowed list: {}' \
.format(sorted({bc[2] for bc in bogus_controls})))
with self.assertRaisesRegex(RuntimeError, err_msg):
self._platform_service.start_batch(client_pid, signal_config,
control_config)
bogus_signals = [(0, 0, 'invalid_uncore'), (0, 0, 'invalid_power')]
signal_config.extend(bogus_signals)
err_msg = re.escape('Requested signals that are not in allowed list: {}' \
.format(sorted({bs[2] for bs in bogus_signals})))
with self.assertRaisesRegex(RuntimeError, err_msg):
self._platform_service.start_batch(client_pid, signal_config,
control_config)
def test_start_batch_write_blocked(self):
"""Write mode batch server will not start when write lock is held
This test calls write_control without a session leader, and then a
different PID tries to create a write mode batch server with a session
leader. This request should fail.
"""
client_pid = 999
client_sid = 333
other_pid = 666
control_name = 'geopm'
domain = 7
domain_idx = 42
setting = 777
session_data = self.open_mock_session('other', other_pid)
mock_pwuid = mock.MagicMock()
self._mock_write_lock.try_lock.return_value = other_pid
with mock.patch('geopmdpy.pio.write_control', return_value=[]) as mock_write_control, \
mock.patch('geopmdpy.pio.save_control_dir'), \
mock.patch('os.getsid', return_value=other_pid) as mock_getsid:
self._platform_service.write_control(other_pid, control_name, domain, domain_idx, setting)
mock_write_control.assert_called_once_with(control_name, domain, domain_idx, setting)
session_data = self.open_mock_session('', client_pid)
valid_signals = session_data['signals']
valid_controls = session_data['controls']
signal_config = [(0, 0, sig) for sig in valid_signals]
control_config = [(0, 0, con) for con in valid_controls]
mock_pwuid.pw_name = 'test_user'
err_msg = f'The PID {client_pid} requested write access, but the geopm service already has write mode client with PID or SID of {abs(other_pid)}'
with self.assertRaisesRegex(RuntimeError, err_msg), \
mock.patch('geopmdpy.pio.start_batch_server', return_value = (2345, "2345")), \
mock.patch('os.getsid', return_value=client_sid) as mock_getsid, \
mock.patch('pwd.getpwuid', return_value=mock_pwuid) as mock_getpwuid, \
mock.patch('psutil.pid_exists', return_value=True) as mock_pid_exists:
self._platform_service.start_batch(client_pid, signal_config,
control_config)
def test_start_batch(self):
session_data = self.open_mock_session('')
client_pid = session_data['client_pid']
watch_id = session_data['watch_id']
valid_signals = session_data['signals']
valid_controls = session_data['controls']
signal_config = [(0, 0, sig) for sig in valid_signals]
control_config = [(0, 0, con) for con in valid_controls]
expected_result = (1234, "1234")
self._mock_write_lock.try_lock.return_value = client_pid
with mock.patch('geopmdpy.pio.start_batch_server', return_value=expected_result), \
mock.patch('geopmdpy.pio.save_control_dir'), \
mock.patch('os.getsid', return_value=client_pid) as mock_getsid:
actual_result = self._platform_service.start_batch(client_pid, signal_config,
control_config)
self.assertEqual(expected_result, actual_result,
msg='start_batch() did not pass back correct result')
save_dir = os.path.join(self._platform_service._RUN_PATH,
self._platform_service._SAVE_DIR)
self.assertTrue(os.path.isdir(save_dir),
msg = 'Directory does not exist: {}'.format(save_dir))
self._mock_active_sessions.get_batch_server.return_value = expected_result[0]
with mock.patch('geopmdpy.pio.stop_batch_server', return_value=[]) as mock_stop_batch_server, \
mock.patch('psutil.pid_exists', return_value=True) as mock_pid_exists:
self._platform_service.stop_batch(client_pid, expected_result[0])
mock_stop_batch_server.assert_called_once_with(expected_result[0])
def test_stop_batch_invalid(self):
with self.assertRaisesRegex(RuntimeError, self._check_client_active_err_msg):
self._platform_service.stop_batch('', '')
def test_read_signal_invalid(self):
with self.assertRaisesRegex(RuntimeError, self._check_client_active_err_msg):
self._platform_service.read_signal('', '', '', '')
session_data = self.open_mock_session('')
client_pid = session_data['client_pid']
signal_name = 'geopm'
err_msg = 'Requested signal that is not in allowed list: {}'.format(signal_name)
with self.assertRaisesRegex(RuntimeError, err_msg):
self._platform_service.read_signal(client_pid, signal_name, '', '')
def test_read_signal(self):
session_data = self.open_mock_session('')
client_pid = session_data['client_pid']
signal_name = 'energy'
domain = 7
domain_idx = 42
with mock.patch('geopmdpy.pio.read_signal', return_value=[]) as rs:
self._platform_service.read_signal(client_pid, signal_name, domain, domain_idx)
rs.assert_called_once_with(signal_name, domain, domain_idx)
def test_write_control_invalid(self):
with self.assertRaisesRegex(RuntimeError, self._check_client_active_err_msg):
self._platform_service.write_control('', '', '', '', '')
session_data = self.open_mock_session('')
client_pid = session_data['client_pid']
control_name = 'energy'
err_msg = 'Requested control that is not in allowed list: {}'.format(control_name)
with self.assertRaisesRegex(RuntimeError, err_msg):
self._platform_service.write_control(client_pid, control_name, '', '', '')
def test_write_control(self):
session_data = self.open_mock_session('')
client_pid = session_data['client_pid']
self._mock_write_lock.try_lock.return_value = client_pid
control_name = 'geopm'
domain = 7
domain_idx = 42
setting = 777
with mock.patch('geopmdpy.pio.write_control', return_value=[]) as mock_write_control, \
mock.patch('geopmdpy.pio.save_control_dir'), \
mock.patch('os.getsid', return_value=client_pid) as mock_getsid:
self._platform_service.write_control(client_pid, control_name, domain, domain_idx, setting)
mock_write_control.assert_called_once_with(control_name, domain, domain_idx, setting)
def test_restore_already_closed(self):
client_pid = -999
session_data = self.open_mock_session('user_name', client_pid, True, 2) # 2
self._platform_service.close_session(client_pid) # 1
self._platform_service.close_session(client_pid) # 0
self._platform_service._active_sessions.check_client_active = mock.MagicMock(side_effect=RuntimeError)
with self.assertRaises(RuntimeError):
self._platform_service.restore_control(client_pid)
def test_restore_write_blocked(self):
client_pid = 999
client_sid = 333
other_pid = 666
control_name = 'geopm'
domain = 7
domain_idx = 42
setting = 777
self.open_mock_session('other', other_pid)
with mock.patch('geopmdpy.pio.write_control', return_value=[]) as mock_write_control, \
mock.patch('geopmdpy.pio.save_control_dir'), \
mock.patch('os.getsid', return_value=other_pid):
self._platform_service.write_control(other_pid, control_name, domain, domain_idx, setting)
mock_write_control.assert_called_once_with(control_name, domain, domain_idx, setting)
self.open_mock_session('', client_pid)
mock_pwuid = mock.MagicMock()
mock_pwuid.pw_name = 'test_user'
self._mock_write_lock.try_lock.return_value = other_pid
err_msg = f'The PID {client_pid} requested write access, but the geopm service already has write mode client with PID or SID of {abs(other_pid)}'
with self.assertRaisesRegex(RuntimeError, err_msg), \
mock.patch('geopmdpy.pio.restore_control_dir'), \
mock.patch('os.getsid', return_value=client_sid), \
mock.patch('pwd.getpwuid', return_value=mock_pwuid), \
mock.patch('psutil.pid_exists', return_value=True):
self._platform_service.restore_control(client_pid)
def test_restore_control(self):
session_data = self.open_mock_session('')
client_pid = session_data['client_pid']
self._mock_write_lock.try_lock.return_value = client_pid
with mock.patch('geopmdpy.pio.save_control_dir'), \
mock.patch('os.getsid', return_value=client_pid), \
mock.patch('geopmdpy.pio.restore_control_dir') as mock_restore_control_dir:
self._platform_service.restore_control(client_pid)
save_dir = os.path.join(self._platform_service._RUN_PATH,
self._platform_service._SAVE_DIR)
mock_restore_control_dir.assert_called_once_with(save_dir)
def test_get_cache(self):
topo = mock.MagicMock()
topo_service = TopoService(topo=topo)
mock_open = mock.mock_open(read_data='data')
cache_file = '/run/geopm/geopm-topo-cache'
with mock.patch('builtins.open', mock_open):
cache_data = topo_service.get_cache()
self.assertEqual('data', cache_data)
topo.assert_has_calls([mock.call.create_cache()])
calls = [mock.call(cache_file),
mock.call().__enter__(),
mock.call().read(),
mock.call().__exit__(None, None, None)]
mock_open.assert_has_calls(calls)
if __name__ == '__main__':
unittest.main()
| geopm/geopm | service/geopmdpy_test/TestPlatformService.py | TestPlatformService.py | py | 21,394 | python | en | code | 79 | github-code | 6 |
72474001787 |
import random
import numpy as np
from math import sqrt, log
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
x1_list = []
x2_list = []
y_list = []
counter = 0
def drawFunc(minX, minY, maxX, maxY):
fig, ax = plt.subplots(subplot_kw={"projection": "3d"})
ax.set_xlabel('x1')
ax.set_ylabel('x2')
ax.set_zlabel('f(x1,x2)')
x1_array = np.arange(minX, maxX, 0.1)
x2_array = np.arange(minY, maxY, 0.1)
x1_array, x2_array = np.meshgrid(x1_array, x2_array)
R = f(x1_array, x2_array)
ax.plot_surface(x1_array, x2_array, R, color='b', alpha=0.5)
plt.show()
def drawBoder(ax, x1, g, z_min, z_max):
zs = np.arange(0, 300, 100)
X, Z = np.meshgrid(x1, zs)
Y = g(X)
#fig = plt.figure()
#ax = fig.add_subplot(111, projection='3d')
ax.plot_surface(X, Y, Z)
def show(x1_list, x2_list):
    N = len(x1_list)
if (N <= 0):
return
fig, ax = plt.subplots(subplot_kw={"projection": "3d"})
x1_array = []
x2_array = []
#x1_array = np.arange(min(x1_list) - 0.1, max(x1_list) + 0.1, 0.1)
#x2_array = np.arange(min(x2_list) - 0.1, max(x2_list) + 0.1, 0.1)
nums = np.arange(0, 5, 0.1)
for i in range(len(nums)):
for j in range(len(nums)):
if(barier(nums[i], nums[j])):
x1_array.append(nums[i])
x2_array.append(nums[j])
x1_array = np.array(x1_array)
x2_array = np.array(x2_array)
x1_array, x2_array = np.meshgrid(x1_array, x2_array)
R = f(x1_array, x2_array)
#drawBoder(ax, x1_array, g1_1, R.min(), R.max())
#drawBoder(ax, x1_array, g2_1, R.min(), R.max())
#drawBoder(ax, x1_array, g3_1, R.min(), R.max())
#drawBoder(ax, x1_array, g4_1, R.min(), R.max())
ax.set_xlabel('x1')
ax.set_ylabel('x2')
ax.set_zlabel('f(x1,x2)')
ax.plot_surface(x1_array, x2_array, R, color='b', alpha=0.5)
x1_list2 = []
x2_list2 = []
f_list = []
ax.scatter(x1_list[0], x2_list[0], f(x1_list[0], x2_list[0]), c='black')
x1_list2.append(x1_list[0])
x2_list2.append(x2_list[0])
f_list.append(f(x1_list[0], x2_list[0]))
for n in range(1, N - 1):
ax.scatter(x1_list[n], x2_list[n], f(x1_list[n], x2_list[n]), c='red')
x1_list2.append(x1_list[n])
x2_list2.append(x2_list[n])
f_list.append(f(x1_list[n], x2_list[n]))
ax.scatter(x1_list[N - 1], x2_list[N - 1], f(x1_list[N - 1], x2_list[N - 1]), c='green')
x1_list2.append(x1_list[N - 1])
x2_list2.append(x2_list[N - 1])
    f_list.append(f(x1_list[N - 1], x2_list[N - 1]))
ax.plot(x1_list2, x2_list2, f_list, color="black")
plt.show()
def f_1(x1, x2):
    if (g1_t(x1,x2) and g2_t(x1,x2) and g3_t(x1,x2) and g4_t(x1,x2)):
return (x1-6)**2 +(x2-7)**2
return 0
def g1_1(x1):
return (-3*x1 + 6) / 2
def g2_1(x1):
return (-x1 - 3) / (-1)
def g3_1(x1):
return (x1 - 7) / (-1)
def g4_1(x1):
return (2*x1 - 4) / 3
def f(x1, x2):
return (x1-6)**2 +(x2-7)**2
def g1(x1, x2):
return -3*x1 - 2*x2 + 6 #<= 0
def g2(x1, x2):
return -x1 + x2 - 3 #<= 0
def g3(x1, x2):
return x1 + x2 - 7 #<= 0
def g4(x1, x2):
return 2*x1 - 3*x2 - 4 #<= 0
def g1_t(x, y):
return -3*x - 2*y + 6 <= 0
def g2_t(x, y):
return -x + y - 3 <= 0
def g3_t(x, y):
return x + y - 7 <= 0
def g4_t(x, y):
return 2*x - 3*y - 4 <= 0
def F(x1, x2, r):
#sum = 1/g1(x1, x2) + 1/g2(x1, x2) + 1/g3(x1, x2) + 1/g4(x1, x2)
#- r * sum
return f(x1,x2) + P(x1, x2, r)
def F2(x1, x2, r):
#print("x1 =", x1)
#print("x2 =", x2)
#print("gi =", g3(x1, x2))
#print("log =", log(-g3(x1, x2)))
sum = log(-g1(x1, x2)) + log(-g2(x1, x2)) + log(-g3(x1, x2)) + log(-g4(x1, x2))
return f(x1,x2) - r * sum
def P(x1, x2, r):
sum = 1/g1(x1, x2) + 1/g2(x1, x2) + 1/g3(x1, x2) + 1/g4(x1, x2)
return -r*sum
def P2(x1, x2, r):
sum = log(-g1(x1, x2)) + log(-g2(x1, x2)) + log(-g3(x1, x2)) + log(-g4(x1, x2))
return -r*sum
min_val = -1500000
def calc_r(a, n):
return a * (sqrt(n + 1) - 1 + n) / (n * sqrt(2))
def calc_s(a, n):
return a * (sqrt(n + 1) - 1) / (n * sqrt(2))
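# calc_r and calc_s are the offsets used to build the initial regular simplex:
# from a base vertex x0, the other two vertices are placed at x0 + (r, s) and
# x0 + (s, r) (see simplexnyi_method below).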
def dist(x1, x2):
return sqrt((x1[0] - x2[0])**2 + (x1[1] - x2[1])**2)
def max_f(f):
f_max = max(f)
index = f.index(f_max)
return index
def calc_x_next(x, index, n):
res = np.array([0,0])
for i in range(len(x)):
if (i == index): continue
res = res + x[i]
res *= (2 / (n - 1))
res -= x[index]
return res
def calc_centr(x):
return (x[0][0]+x[1][0]+x[2][0]) / 3, (x[0][1]+x[1][1]+x[2][1]) / 3
def barier(x1, x2):
return not (g1_t(x1, x2) and g2_t(x1, x2) and g3_t(x1, x2) and g4_t(x1, x2))
def simplexnyi_method(x0, e, a, n, r):
global counter
x1 = np.array([x0[0] + calc_r(a, n), x0[1] + calc_s(a, n)])
x2 = np.array([x0[0] + calc_s(a, n), x0[1] + calc_r(a, n)])
    if (barier(x0[0], x0[1])): return (x0[0], x0[1], a)  # infeasible start point
while (barier(x1[0], x1[1]) or barier(x2[0], x2[1])):
a /= 2
x1 = np.array([x0[0] + calc_r(a, n), x0[1] + calc_s(a, n)])
x2 = np.array([x0[0] + calc_s(a, n), x0[1] + calc_r(a, n)])
x = [x0, x1, x2]
counter += 3
    while (dist(x[0], x[1]) > e or dist(x[1], x[2]) > e or dist(x[2], x[0]) > e):
        # Compute the centroid first so it is defined on the first pass.
        center = calc_centr(x)
        #print("center =", center, "f =", f(center[0], center[1]))
        if (barier(x[0][0], x[0][1]) or barier(x[1][0], x[1][1]) or barier(x[2][0], x[2][1])):
            return (center[0], center[1], a)
f_list = []
x1_list.append(center[0]); x2_list.append(center[1])
counter += 1
f_list.append(F2(x[0][0], x[0][1], r))
f_list.append(F2(x[1][0], x[1][1], r))
f_list.append(F2(x[2][0], x[2][1], r))
counter += 1
while(True):
f_values = f_list
i = max_f(f_values)
xn = calc_x_next(x, i, n)
if (not barier(xn[0], xn[1])):
fn = F2(xn[0], xn[1], r); counter += 1
#x_new = x.copy()
#x_new[i] = xn
#x_c = calc_centr(x_new)
if (f_values[i] > fn): x[i] = xn ; break
f_values[i] = min_val
if (f_values[0] == min_val and f_values[1] == min_val and f_values[2] == min_val):
                a /= 2
                # x[0] stays fixed; rebuild the other two vertices with the halved edge.
                x[1] = np.array([x[0][0] + calc_r(a, n), x[0][1] + calc_s(a, n)])
                x[2] = np.array([x[0][0] + calc_s(a, n), x[0][1] + calc_r(a, n)])
break
cur_center = calc_centr(x)
#print(center)
if barier(cur_center[0], cur_center[1]):
return (center[0], center[1], a)
point = calc_centr(x)
return (point[0], point[1], a)
def barrier_function_method(x1, x2, r, C, e, a, n, k):
global counter
counter += 1
min_x1, min_x2, a = simplexnyi_method([x1, x2], e, a, n, r)
fine = P2(min_x1, min_x2, r)
if (abs(fine) <= e):
return [(round(min_x1, round_num),
round(min_x2, round_num),
round(f(min_x1, min_x2), round_num)),
k]
k += 1
r = r/C
return barrier_function_method(min_x1, min_x2, r, C, e, a, n, k)
round_num = 3
x1 = 2
x2 = 2
e = 0.001
#a = 0.001
a = 1; n = 3
r = 1
c = 14
k = 0
result = barrier_function_method(x1, x2, r, c, e, a, n, k)
print(f"Barrier function method: {result[0]}; count of iteractions = {result[1]}")
print('Count of compute function =', counter + 1)
show(x1_list, x2_list)
drawFunc(-5, -5, 15, 15)
| AlexSmirno/Learning | 6 Семестр/Оптимизация/Lab_6_test.py | Lab_6_test.py | py | 7,638 | python | en | code | 0 | github-code | 6 |
20538374789 |
# https://leetcode.com/problems/counting-bits/
"""
Time complexity:- O(N)
Space Complexity:- O(N)
"""
from typing import List
class Solution:
def countBits(self, n: int) -> List[int]:
# Initialize a list 'dp' to store the number of 1 bits for each integer from 0 to 'n'.
dp = [0] * (n + 1)
offset = 1 # Initialize an 'offset' variable to keep track of the power of 2.
# Iterate through integers from 1 to 'n'.
for i in range(1, n + 1):
# Check if 'i' is a power of 2, in which case update the 'offset'.
if offset * 2 == i:
offset = i
# Calculate the number of 1 bits for 'i' using 'offset'.
dp[i] = 1 + dp[i - offset]
return dp # Return the list 'dp' containing the counts.
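# A quick sanity check (hypothetical usage, not part of the original solution):
if __name__ == "__main__":
    # 0..5 in binary: 0, 1, 10, 11, 100, 101 -> set-bit counts [0, 1, 1, 2, 1, 2]
    assert Solution().countBits(5) == [0, 1, 1, 2, 1, 2]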
| Amit258012/100daysofcode | Day51/counting_bits.py | counting_bits.py | py | 800 | python | en | code | 0 | github-code | 6 |
40889171603 |
# coding:utf-8
import os
APP_NAME = "torweb"
# Server
PORT = 9000
DEBUG = True
# log file
log_path = '/var/tmp/'
# cache
sys_status = [0, 0, 0, 0]
# Tornado
COOKIE_SECRET = "6aOO5ZC55LiN5pWj6ZW/5oGo77yM6Iqx5p+T5LiN6YCP5Lmh5oSB44CC"
TEMPLATE_PATH = 'frontend/templates'
LOGIN_URL = '/login'
avatar_upload_path = './frontend/static/assets/images/avatar/'
common_upload_path = './frontend/static/assets/images/'
static_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'static')
default_page_limit = 7
default_avatar = 'default_doubi.png'
default_404_url = '/static/404.html'
# Session
session_settings = {
'cookie_name': 'session_id',
'cookie_domain': None,
'cookie_expires': 86400, #24 * 60 * 60, # 24 hours in seconds
'ignore_expiry': True,
'ignore_change_ip': True,
'secret_key': COOKIE_SECRET,
'expired_message': 'Session expired',
'httponly': True
}
| jmpews/torweb | settings/common.py | common.py | py | 906 | python | en | code | 9 | github-code | 6 |
33146835188 |
from flask import Flask, jsonify, redirect, url_for, request
import os
import pymysql
app = Flask(__name__)
@app.route('/')
def index():
response = jsonify({"Choo Choo": "Welcome to your Flask app 🚅"})
response.headers.add("Access-Control-Allow-Origin", "*")
return response
@app.route('/add', methods = ['POST', 'GET'])
def add():
# connection = pymysql.connect( host='containers-us-west-32.railway.app', user='root', passwd='Jyfcd452Xe3tmMsFLYDY', port=5522, db='railway' )
# with connection.cursor() as cursor:
# sql = "INSERT INTO table_name (name, ship, base) VALUES ('Name', 1, 0)"
# cursor.execute(sql)
user = request.form['nm']
response = jsonify({"data": user})
response.headers.add("Access-Control-Allow-Origin", "*")
return response
@app.route('/insert', methods=['POST'])
def insert_row():
name = request.form['name']
ship = request.form['ship']
base = request.form['base']
query = "INSERT INTO marked_systems (name, ship, base) VALUES (%s, %s, %s)"
values = (name, ship, base)
connection = pymysql.connect( host='containers-us-west-32.railway.app', user='root', passwd='Jyfcd452Xe3tmMsFLYDY', port=5522, db='railway' )
with connection.cursor() as cursor:
sql = "SELECT * FROM `marked_systems` WHERE `name`=%s"
        cursor.execute(sql, (name,))
result = cursor.fetchall()
print(result)
if result:
print("result found, deleting")
query = "DELETE FROM marked_systems WHERE `name`=%s"
cursor.execute(sql, (name))
else:
print("no result found, creating")
cursor.execute(query, values)
connection.commit()
response = jsonify({'status': 'success'})
response.headers.add("Access-Control-Allow-Origin", "*")
return response
@app.route('/data')
def get_data():
connection = pymysql.connect( host='containers-us-west-32.railway.app', user='root', passwd='Jyfcd452Xe3tmMsFLYDY', port=5522, db='railway' )
with connection.cursor() as cursor:
sql = "SELECT * FROM `marked_systems`"
cursor.execute(sql)
result = cursor.fetchall()
print(result)
response = jsonify({"data": result})
response.headers.add("Access-Control-Allow-Origin", "*")
return response
if __name__ == '__main__':
app.run(debug=True, port=os.getenv("PORT", default=5000))
| zpdunlap/flask | main.py | main.py | py | 2,425 | python | en | code | 0 | github-code | 6 |
73662491386 |
from multiprocessing import Pool
import time
from datetime import datetime
from typing import Any
from tqdm import tqdm
from tqdm.contrib.concurrent import process_map
import itertools
from classifiers import project_algorithms
from data_ingestion import ingestion_functions
from sklearn.model_selection import KFold, GridSearchCV
from sklearn import metrics
from dataclasses import dataclass
import joblib
import os
@dataclass
class Result:
algo_name: str
dataset_name: str
trial_num: int
X_train: Any
Y_train: Any
Y_train_pred: Any
X_test: Any
Y_test: Any
Y_test_pred: Any
# Runtime, in seconds
runtime: float
"""A pandas DataFrame that can be imported
"""
best_index_: int
best_params: Any
cv_results_: Any
def run_trial(with_params) -> Result:
"""Runs a given trial using a given algorithm. Fetches data from data_fetcher.
Args:
data_fetcher (fn -> tuple): [description]
algorithm ([fn]): [description]
num_trial ([type]): [description]
Returns:
[type]: [description]
"""
start = datetime.now()
data_fetcher, algorithm, num_trial = with_params
(algo, params) = algorithm()
X_train, X_test, Y_train, Y_test = data_fetcher()
# GridSearchCV automatically does 5 kfold splits.
search_results = GridSearchCV(algo, params, scoring={
'AUC': 'roc_auc',
'Accuracy': metrics.make_scorer(metrics.accuracy_score),
'F1': 'f1',
'Precision': 'precision',
'Recall': 'recall',
'MCC': metrics.make_scorer(metrics.matthews_corrcoef)
}, refit='Accuracy')
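    # refit='Accuracy' means best_estimator_ is re-fitted on the full training
    # split using the accuracy-optimal hyper-parameters.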
search_results.fit(X_train, Y_train)
opt_classifier = search_results.best_estimator_
opt_classifier.fit(X_train, Y_train)
Y_train_pred = opt_classifier.predict(X_train)
Y_test_pred = opt_classifier.predict(X_test)
# Get metrics for the classifiers
end = datetime.now()
runtime = (end - start).total_seconds()
return Result(
algo_name=algorithm.__name__,
dataset_name=data_fetcher.__name__,
trial_num=num_trial,
X_train = X_train,
Y_train = Y_train,
Y_train_pred = Y_train_pred,
X_test=X_test,
Y_test = Y_test,
Y_test_pred = Y_test_pred,
runtime=runtime,
best_index_=search_results.best_index_,
best_params=search_results.best_params_,
cv_results_ = search_results.cv_results_
)
def run_all_trials():
trial_combinations = list(
itertools.product(ingestion_functions, project_algorithms, list(range(5)))
)
# Runs all concurrently on different CPUs
# My M1 Macbook Air has 8 cores, so 8 + 4 = 12
YOUR_CPU_CORES = 8
results = process_map(run_trial, trial_combinations, max_workers=YOUR_CPU_CORES + 4)
#Single-threaded for easier debugging
# results = [run_trial(tc) for tc in trial_combinations]
timestamp = int(time.time())
for result in tqdm(results, desc="Saving classifiers to disk..."):
# Save the classifier to disk for use in a Jupyter Notebook
folder_path = f"./classifier_cache/{timestamp}/{result.algo_name}/{result.dataset_name}"
try:
os.makedirs(folder_path)
except FileExistsError:
pass
result_filename = folder_path + f"/{result.trial_num}_cls.joblib.pkl"
_ = joblib.dump(result, result_filename, compress=9)
if __name__ == "__main__":
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
run_all_trials()
| lukebrb/final_project | runners.py | runners.py | py | 3,580 | python | en | code | 0 | github-code | 6 |
41766646913 |
import string
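# Codeforces 1038-A: the longest string using each of the first k uppercase
# letters equally often has length k * (minimum count among those letters),
# or 0 if any required letter is missing.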
s = input().split()
n = int(s[0])
k = int(s[1])
s = input()
min_occ = n
found_all = True
for i in range(k):
cnt = s.count(string.ascii_uppercase[i])
if cnt == 0:
found_all = False
break
if cnt < min_occ:
min_occ = cnt
if not found_all:
print(0)
else:
print(k*min_occ)
| gautambp/codeforces | 1038-A/1038-A-47886872.py | 1038-A-47886872.py | py | 348 | python | en | code | 0 | github-code | 6 |
19317356930 |
# This is a sample Python script.
import random
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
def load_data(file_path):
data_list = []
labels_list = []
with open(file_path, 'r') as f:
lines = f.readlines()
for line in lines:
values = line.strip().split(',')
data = [float(x) for x in values[:-1]]
label = values[-1]
data_list.append(data)
labels_list.append(label)
return data_list, labels_list
def classes_to_numbers(values):
value = values[0]
labels_list = []
for i in range(len(values)):
if value == values[i]:
labels_list.append(1)
else:
labels_list.append(0)
return labels_list
def count_classes(labels_list):
classes = set()
for label in labels_list:
classes.add(label)
return classes
def random_weights(features_count):
weights = []
for i in range(features_count):
weights.append(random.random())
return weights
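# Classic perceptron rule: fire when (w . x - threshold) >= 0, then nudge each
# weight by learning_const * (label - output) * x and shift the threshold.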
def training_perceptron(data, labels, weights, learning_const, epochs):
threshold = random.random()
for epoch in range(epochs):
for i in range(len(data)):
x_i = data[i]
l_i = labels[i]
suma = sum([w * x for w, x in zip(weights, x_i)]) - threshold
output = 1 if suma >= 0 else 0
error = l_i - output
weights = [w + learning_const * error * x for w, x in zip(weights, x_i)]
threshold += error * learning_const
return weights, threshold
def using_perceptron(data, labels, weights, threshold):
guesses = 0
for i in range(len(data)):
x_i = data[i]
l_i = labels[i]
suma = sum([w * x for w, x in zip(weights, x_i)]) - threshold
output = 1 if suma >= 0 else 0
error = l_i - output
guesses += 1 if error == 0 else 0
return guesses / len(data) * 100
if __name__ == '__main__':
train_data, train_labels = load_data("Resources/perceptron.data")
test_data, test_labels = load_data("Resources/perceptron.test.data")
features_count = len(train_data[0])
class_count = len(count_classes(train_labels))
weights = random_weights(features_count)
print("Podaj stałą uczenia")
learning_const = float(input())
print("Czy chcesz podać własny wektor? [1 / 2]")
user_in = input()
while user_in == "1":
vector = input()
vector = vector.strip().split(',')
data = [float(x) for x in vector[:-1]]
test_data.append(data)
test_labels.append(vector[-1])
print("Czy chcesz dodać kolejny? [1 / 2]")
user_in = input()
train_labels = classes_to_numbers(train_labels)
test_labels = classes_to_numbers(test_labels)
output, threshold = training_perceptron(train_data, train_labels, weights, learning_const, 100)
accuracy = using_perceptron(test_data, test_labels, output, threshold)
print("Accuracy: " + str(accuracy) + "%")
| DC060/Perceptron | main.py | main.py | py | 3,122 | python | en | code | 0 | github-code | 6 |
39830820754 |
from src.exceptions import InvalidCommand
class Load:
"""
    **Load: Loads a previously saved command**
    - Line 1: !load
    - Line 2: <label>
    **Result: a !graph command**
"""
def __init__(self, message_content):
try:
self.query, self.label = message_content.split("\n")
except Exception:
raise InvalidCommand()
def __call__(self):
f = open(f"saved/{self.label}.grd", "r")
return f.read()
| pedrohrf/geralda-bot | src/commands/load.py | load.py | py | 477 | python | en | code | 0 | github-code | 6 |
71818859709 |
import sys
import numpy as np
import pickle
Vectors = {}
for line in sys.stdin:
thisLine = line.split()
    # Build a unit-normalized vector from the whitespace-separated floats
    # (a list comprehension materializes the values under Python 3).
    thisVector = np.array([float(x) for x in thisLine[1:]])
    thisVector /= np.linalg.norm(thisVector)
    Vectors[thisLine[0]] = thisVector
pickle.dump(Vectors, sys.stdout.buffer)  # pickle needs a binary stream under Python 3
| NehaNayak/shiny-octo-lana | scripts/numpyizeVectors.py | numpyizeVectors.py | py | 278 | python | en | code | 0 | github-code | 6 |
72490450747 |
from polygon import *
import math
from functools import wraps,lru_cache
from collections import namedtuple
@validate_type(int)
@validate_params
class Polygon_sequence:
def __init__(self,n,circumradius):
""" This function initializes the number of polygons and circum radius. """
self.n = n
self.circumradius = circumradius
@lru_cache(maxsize=2**10)
def get_polygon(self,vertex , circumradius):
""" This function returns the properties of the polygon such as vertex , circumradius, interior angle, edge length , apothem, area, perimeter as a named tuple.
"""
polygon = Polygon(vertex, circumradius)
interiorAngle = polygon.interiorAngle
edgeLength = polygon.edgeLength
apothem = polygon.apothem
area = polygon.area
perimeter = polygon.perimeter
prop_names = ('vertex' , 'circumradius', 'interiorAngle', 'edgeLength' , 'apothem', 'area', 'perimeter')
properties = namedtuple('Polygon', prop_names)
# print(f'Calculating for Polygon with Vertex:{vertex} , CircumRadius: {circumradius}')
return properties(vertex , circumradius, interiorAngle, edgeLength , apothem, area, perimeter)
def max_efficiency(self):
""" This function returns the maximum efficiency polygon.
Here, a maximum efficiency polygon is one that has the highest area to perimeter ratio.
"""
ratios = []
for i in range(3, self.n+1):
""" This function """
p = self.get_polygon( i , self.circumradius)
ratios.append(p.area/p.perimeter)
# print(ratios)
max_index = max(range(len(ratios)), key=ratios.__getitem__)
# print(ratios)
print(f'Polygon with {max_index+3} vertices has the Max Efficiency of {ratios[max_index]}')
def __getitem__(self,vertex):
""" This function returns the properties of the polygon whose vertices are as passed in the arguments.
It returns 'Not a polygon' message if the number of vertices is less than 3.
"""
        if not isinstance(vertex, int):
return 'Error: Incorrect type for parameter '
elif vertex <3 :
return 'Error: This is not a polygon'
else:
return self.get_polygon( vertex , self.circumradius)
def __repr__(self):
""" This function gives the details of the Polygon Sequence object"""
return f""" Contains {self.n} polygons with a circum radius of {self.circumradius} and vertices ranging from 3 to {self.n}"""
def __len__(self):
""" This function gives the length of the Polygon Sequence object """
return self.n
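# Hypothetical usage sketch (the validate_type/validate_params decorators come
# from the star-imported polygon module):
# seq = Polygon_sequence(25, 8)
# seq.max_efficiency()   # prints the polygon with the best area/perimeter ratio
# seq[10]                # named tuple of properties for the 10-vertex polygon
# len(seq)               # 25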
| m-shilpa/EPAI | Session_10_Sequence_Types/polygon_sequence.py | polygon_sequence.py | py | 2,781 | python | en | code | 0 | github-code | 6 |
71477051708 |
import sys
input = sys.stdin.readline
tc = int(input())
INF = int(1e9)
def bellman_ford(start):
distance = [INF] * (n+1)
distance[start] = 0
for i in range(n):
for edge in edges:
current = edge[0]
next = edge[1]
cost = edge[2]
if distance[next] > distance[current] + cost:
distance[next] = distance[current] + cost
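                # A relaxation that still succeeds on the n-th pass means a
                # negative cycle (a time-rewinding loop) is reachable.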
if i == n-1:
return True
return False
for _ in range(tc):
    # n: number of locations, m: number of roads, w: number of wormholes
    # Roads are undirected; wormholes are directed.
n, m, w = map(int, input().split())
    # Connected roads
edges = []
for _ in range(m):
        # s, e: the connected locations; t: the travel time
s, e, t = map(int, input().split())
edges.append((s, e, t))
edges.append((e, s, t))
    # Wormholes (travelling one sends you back in time, hence weight -t)
for _ in range(w):
s, e, t = map(int, input().split())
edges.append((s, e, -t))
result = bellman_ford(1)
if result:
print("YES")
else:
print("NO")
| YOONJAHYUN/Python | BOJ/1865.py | 1865.py | py | 1,136 | python | ko | code | 2 | github-code | 6 |
27673839201 |
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import html2text
class Html2TextPipeline(object):
def process_item(self, item, spider):
for f in ('description_md', 'abstract_md'):
val = item.get(f)
if val:
item[f] = html2text.html2text(val)
return item
| redapple/pyvideo-contrib | pyconfr2015/pyconfr2015/pyconfr2015/pipelines.py | pipelines.py | py | 459 | python | en | code | 0 | github-code | 6 |
24021214626 |
"""
Allows for platform-specific configuration
options.
"""
import os
from enum import Enum
from typing import Any, Dict
class BaseOrigin(Enum):
"""
Enum for the origin of the base path
"""
ENV = 1
"""The base path is specified in the environment"""
CONF = 2
"""The base path is specified in the config file"""
DEFAULT = 3
"""The base path is the default one"""
class PlatformsConfig:
"""
Contains some configuration options for
platforms.
"""
base_dir: str
"""
The directory containing all platform-related content (base path)
- nothing should be stored in here directly
"""
base_origin: BaseOrigin
"""The origin of the base path"""
data_dir: str
"""The directory where data of all platforms should be stored"""
__data_dir: str
"""Unparsed version of data_dir"""
conf_dir: str
"""The directory where platforms can store configuration files"""
__conf_dir: str
"""Unparsed version of conf_dir"""
def __init__(self, base_dir: str, base_origin: BaseOrigin, data_dir: str, conf_dir: str):
"""
Creates a new PlatformsConfig object
Args:
base_dir (str): The directory containing all platform-related content (base path) - nothing should be stored in here directly
base_origin (BaseOrigin): The origin of the base path
data_dir (str): The directory where data of all platforms should be stored
conf_dir (str): The directory where platforms can store configuration files
"""
self.__dict__['base_dir'] = base_dir
self.__dict__['base_origin'] = base_origin
if not os.path.isdir(base_dir):
os.makedirs(base_dir)
self.data_dir = data_dir
self.conf_dir = conf_dir
def dumpj(self) -> Dict[str, str]:
"""
Returns a dictionary representation of this object
"""
dump: Dict[str, str] = {
'data_dir': self.data_dir,
'conf_dir': self.conf_dir
}
if self.base_origin == BaseOrigin.CONF:
dump['base_dir'] = self.base_dir
return dump
def __setattr__(self, k: str, v: Any) -> None:
if k in ('data_dir', 'conf_dir',):
self.__dict__[f'__{k}'] = v
self.__dict__[k] = v.replace('$D4V1D_DIR', self.base_dir)
if not os.path.isdir(self.__dict__[k]):
os.makedirs(self.__dict__[k])
elif k == 'base_dir':
self.__dict__[k] = v
self.__dict__['base_origin'] = BaseOrigin.CONF
self.data_dir = self.__data_dir
self.conf_dir = self.__conf_dir
elif k.startswith('_'):
# platform-specific options should always be
# prefixed with an underscore
self.__dict__[k] = v
@classmethod
def loadj(cls, j: Dict[str, str]) -> "PlatformsConfig":
"""
Creates a new PlatformsConfig object from a dictionary
"""
base_dir: str
base_origin: BaseOrigin
if 'base_dir' in j.keys():
base_dir = j['base_dir']
base_origin = BaseOrigin.CONF
elif os.getenv('D4V1D_DIR'):
base_dir = os.getenv('D4V1D_DIR')
base_origin = BaseOrigin.ENV
else:
base_dir = os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')), '_')
base_origin = BaseOrigin.DEFAULT
return PlatformsConfig(base_dir, base_origin, j['data_dir'], j['conf_dir'])
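# Minimal usage sketch (hypothetical values): any '$D4V1D_DIR' placeholder in
# the configured paths is expanded to the resolved base_dir on assignment.
# cfg = PlatformsConfig.loadj({'data_dir': '$D4V1D_DIR/data',
#                              'conf_dir': '$D4V1D_DIR/conf'})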
| MattMoony/d4v1d | d4v1d/config/platforms.py | platforms.py | py | 3,549 | python | en | code | 34 | github-code | 6 |
72013262909 |
import gym
from memory import ReplayBuff
from models import Network
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils import clip_grad_norm_
class agent():
'''DQN Agent.
Attribute:
memory: replay buffer to store transition
batch_size: batch size for sampling
epsilon: parameter for epsilon greedy policy
epsilon_decay: step size to decrease epsilon
epsilon_train_start: initial(max) value of epsilon
epsilon_train_end: end(min) value of epsilon
target_update_period: period for target model's hard update
gamma: discount factor
net: RL model to train and select actions
target_net: target model to hard update
optimizer: optimizer for training dqn
        train_mode: train a new model or test a model
step_num: current timestep
Arg:
observation_space: shape of observation from environment
action_num: the number of optional discrete actions
replay_capacity: length of memory
batch_size: batch size for sampling
target_update_period: period for target model's hard update
learning_rate: learning rate
epsilon_train_start: max value of epsilon
epsilon_train_end: min value of epsilon
        gamma: discount factor
        train_mode: train a new model or test a model
'''
def __init__(
self,
scenario,
seed=123,
stack_size=1,
replay_capacity=4096,
batch_size=64,
learning_rate=0.0001,
gamma=0.99,
update_horizon=1,
min_replay_history=128,
update_period=1,
target_update_period=32,
epsilon_train_start=1,
epsilon_train_end=0.01,
epsilon_eval=0.001,
epsilon_decay=0.0001,
# distributional param
v_min = 0.0, # from preliminary experiments over the training games,see section5
v_max = 200.0,
atom_size=51, # see section5,footnote2,in original paper
train_mode=True):
self.env=gym.make(scenario)
self.env.seed(seed)
self.batch_size=batch_size
self.update_period=update_period
self.target_update_period=target_update_period
self.gamma=gamma
#-----dist
self.v_min=v_min
self.v_max=v_max
self.atom_size=atom_size
self.train_mode=train_mode
if min_replay_history<batch_size:
self.min_replay_history=batch_size
else:
self.min_replay_history=min_replay_history
self.action_num=self.env.action_space.n
if self.train_mode:
self.epsilon=epsilon_train_start
self.epsilon_decay=epsilon_decay
self.epsilon_train_start=epsilon_train_start
self.epsilon_train_end=epsilon_train_end
else:
self.epsilon=epsilon_eval
self.device=torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(self.device)
# distributional DQN
# section4.1
# support is the set of atoms
self.support=torch.linspace(self.v_min,self.v_max,self.atom_size).to(self.device)
self.delta_z=float(self.v_max-self.v_min)/(self.atom_size-1)
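        # The support is the fixed set of atoms z_i = v_min + i * delta_z,
        # i = 0 .. atom_size-1; the network predicts a probability mass over
        # these support points for every action.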
self.net=Network(self.env.observation_space.shape[0],self.env.action_space.n,atom_size,self.support).to(self.device)
if self.train_mode:
self.memory=ReplayBuff(replay_capacity,self.env.observation_space.shape[0])
self.target_net=Network(self.env.observation_space.shape[0],self.env.action_space.n,atom_size,self.support).to(self.device)
self.target_net.load_state_dict(self.net.state_dict())
self.target_net.eval()
self.optimizer=torch.optim.RMSprop(self.net.parameters(),lr=learning_rate,alpha=0.9,eps=1e-10)
self.loss_func=nn.MSELoss()
else:
self.net.eval()
def select_action(self,state):
if self.epsilon < np.random.random():
with torch.no_grad():
state=torch.from_numpy(state)
action=self.net(state.to(self.device)).detach().cpu().numpy()
#return the index of action
return action.argmax()
else:
return np.random.randint(self.action_num)
def store_transition(self,obs,action,reward,next_obs,done):
self.memory.append(obs,action,reward,next_obs,done)
def update(self):
self.optimizer.zero_grad()
samples=self.memory.sample(self.batch_size)
state=torch.from_numpy(samples["obs"]).to(self.device)
action=torch.from_numpy(samples["action"].reshape(-1,1)).to(self.device)
reward=torch.from_numpy(samples["reward"].reshape(-1,1)).to(self.device)
next_state=torch.from_numpy(samples["next_obs"]).to(self.device)
done=torch.from_numpy(samples["done"].reshape(-1,1)).to(self.device)
# ---------------distribution dqn--------------
with torch.no_grad():
# see algorithms 1 in original paper
# next_action is index
next_action=self.target_net(next_state).argmax(1) # line2-3
next_dist=self.target_net.distributional(next_state) # nextwork output
next_dist=next_dist[range(self.batch_size),next_action] # get distribution by next_action(argmax policy)
t_z=reward+(1-done)*self.gamma*self.support # line7
# reward.shape is (batchsize,1);support,shape is (atomssize);t_z.shape is (batchszie,atomsize)
t_z=t_z.clamp(min=self.v_min,max=self.v_max) # line7
b=(t_z-self.v_min)/self.delta_z # line8
l=b.floor().long() # line9
u=b.ceil().long() # line9
offset=(
torch.linspace(
0,(self.batch_size-1)*self.atom_size,self.batch_size
).long()
.unsqueeze(1)
.expand(self.batch_size,self.atom_size)
.to(self.device)
)
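            # 'offset' shifts each row's atom indices into that row's slice of
            # the flattened (batch_size * atom_size) vector so index_add_ can
            # scatter the projected probabilities sample-by-sample.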
proj_dist = torch.zeros(next_dist.size(),device=self.device)
proj_dist.view(-1).index_add_(0, (l + offset).view(-1), (next_dist * (u.float() - b)).view(-1)) # line11
proj_dist.view(-1).index_add_(0, (u + offset).view(-1), (next_dist * (b - l.float())).view(-1)) # line12
dist=self.net.distributional(state) # line14, p_i(x_t,a_t)
log_p=torch.log(dist[range(self.batch_size),action]) # line14
loss = -(proj_dist*log_p).sum(1).mean() # line14
loss.backward()
        # gradient clipping
        # https://pytorch.org/docs/stable/nn.html#torch.nn.utils.clip_grad_norm_
clip_grad_norm_(self.net.parameters(),1.0,norm_type=1)
self.optimizer.step()
return loss.item()
def target_update(self):
self.target_net.load_state_dict(self.net.state_dict())
def train(self,num_episode):
if not self.train_mode:
return None
step=0
for i in range(num_episode):
r_batch=[]
state=self.env.reset().astype(np.float32)
done=False
while not done:
step+=1
action=self.select_action(state)
next_state,reward,done,_=self.env.step(action)
next_state=next_state.astype(np.float32)
self.store_transition(state,action,reward,next_state,done)
r_batch.append(reward)
state=next_state
if self.memory.size>=self.min_replay_history and step%self.update_period==0:
self.epsilon=max(self.epsilon_train_end,self.epsilon-(self.epsilon_train_start-self.epsilon_train_end)*self.epsilon_decay)
self.update()
if step % self.target_update_period==0:
self.target_update()
print("episode: "+str(i)+" reward_sum: "+str(np.sum(r_batch)))
del r_batch[:]
def test(self,model_path=None,seedlist=None):
if self.train_mode:
return None
if model_path is None:
print("no model to test")
return None
if seedlist is None:
seedlist=[111,123,1234]
self._load(model_path)
for s in seedlist:
self.env.seed(s)
r_batch=[]
state=self.env.reset().astype(np.float32)
done=False
            while not done:
                action=self.select_action(state)
                next_state,reward,done,_=self.env.step(action)
                next_state=next_state.astype(np.float32)
                # Evaluation only: no replay buffer exists in test mode,
                # so transitions are not stored.
                r_batch.append(reward)
                state=next_state
print("seed: "+str(s)+" reward_sum: "+str(np.sum(r_batch)))
del r_batch[:]
def _restore(self,path):
if self.train_mode:
torch.save(self.net.state_dict(),path)
else:
print("testing model,cannot save models")
def _load(self,path):
if self.train_mode:
print("training model,cannot load models")
else:
self.net.load_state_dict(torch.load(path))
def reset(self):
self.memory.ptr=0
self.memory.size=0
if __name__ =='__main__':
torch.set_num_threads(3)
seed=123
np.random.seed(seed)
torch.manual_seed(seed)
train_agent=agent('CartPole-v0',seed=seed)
train_agent.train(15)
| linnaeushuang/RL-pytorch | value-based/distributionalDQN/distributionalDQN_learner.py | distributionalDQN_learner.py | py | 9,928 | python | en | code | 8 | github-code | 6 |
39131054965 |
import os
import base64
from pathlib import Path
from sys import stdout
from Get_data import get_keys
from encrypt_setting import *
class colors:
def __init__(self):
self.blue = "\033[94m"
self.red = "\033[91m"
self.end = "\033[0m"
self.green = "\033[92m"
col = colors()
def print_hacked():
print(col.red+"""
/| /| ---------------------------
||__|| | |
/ O O\__ Hacked Hacked Hacked |
/ \ operating system |
/ \ \ |
/ _ \ \ ----------------------
/ |\____\ \ ||
/ | | | |\____/ ||
/ \| | | |/ | __||
/ / \ ------- |_____| ||
/ | | | --|
| | | |_____ --|
| |_|_|_| | \----
/\ |
/ /\ | /
/ / | | |
___/ / | | |
|____/ c_c_c_C/ \C_c_c_c
\t By: Unifox
"""+col.end)
def encrypt_executeable():
    # Set the target path
    p = Path('/Users/realsung/Desktop')
    # The key, stored base64-encoded
    key = get_keys()
    list_f = []
    # Target extensions
extensions = ["*"] # ['jpg', 'png', 'jpeg', 'iso','exe', 'mp3', "mp4", 'zip', 'rar', 'txt', 'iso']
for extension in extensions:
try:
searche = list(p.glob('**/*.{}'.format(extension)))
for File in searche:
File = str(File)
if File.endswith(".unifox"):
pass
else:
#x = x.split("/")[-1]
list_f.append(File)
#print(File)
except OSError:
print("Permission Error")
for i in list_f:
file_name = i.split("/")[-1]
file_path = i.replace(file_name, "")
word = col.blue+"Encryption: "+col.end+str(i)
print(word)
os.chdir(file_path)
encrypt(getkey(base64.b64decode(key)), file_name)
try:
os.remove(file_name)
except OSError:
pass
    print(col.green+"\n* Finish Encryption *\n"+col.end)
| realsung/Ransomeware | encrypt.py | encrypt.py | py | 2,052 | python | en | code | 2 | github-code | 6 |
33644608975 |
from django.shortcuts import render, redirect, get_object_or_404
from .models import Product, Category, Cart, Address, Order
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect
import decimal
from django.contrib.auth.models import User
from django.contrib import messages
from django.db.models import Q
def home(request):
categories = Category.objects.filter(is_active=True, is_featured=True)[0:4]
data_product = Product.objects.filter(is_active=True, is_featured=True)[0:4]
context = {'products':data_product, 'categories':categories}
return render(request, 'index.html', context)
def categories(request):
categories_data = Category.objects.filter(is_active=True, is_featured=True)
context = {'categories':categories_data}
return render(request, 'categories.html', context)
def product_detail(request, slug):
one_product = Product.objects.get(slug=slug)
related_products = Product.objects.exclude(id=one_product.id).filter(is_active=True, category=one_product.category)
context = {'item':one_product, 'related_products':related_products}
return render(request, 'product_detail.html', context)
@login_required(login_url='signin')
def add_to_cart(request):
user = request.user
product_id = request.GET.get('product_id')
product = get_object_or_404(Product, id=product_id)
# Check Whether the Product is Already in Cart or Not
item_already_in_cart = Cart.objects.filter(product=product_id, user=user)
if item_already_in_cart:
cp = get_object_or_404(Cart, product=product_id, user=user)
cp.quantity += 1
cp.save()
else:
Cart(user=user, product=product).save()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
@login_required(login_url='signin')
def cart(request):
user = request.user
cart_product = Cart.objects.filter(user=user)
amount = decimal.Decimal(0)
shipping_charges = decimal.Decimal(100)
cp = [p for p in Cart.objects.all() if p.user == user]
if cp:
for p in cp:
temp_amount = (p.quantity * p.product.price)
amount += temp_amount
# Customer Address
address = Address.objects.filter(user=user)
context = {
'cart_products':cart_product,
'amount':amount,
'shipping_charges':shipping_charges,
'address':address,
'total':amount + shipping_charges,
}
return render(request, 'cart.html', context)
@login_required(login_url='signin')
def remove_cart(request, id):
cd = Cart.objects.get(id=id)
cd.delete()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
@login_required(login_url='signin')
def profile(request):
orders = Order.objects.filter(user=request.user)
user = User.objects.get(username=request.user)
address = Address.objects.filter(user=request.user)
context = {'user':user, 'useraddress':address, 'orders':orders}
return render(request, 'profile.html', context)
@login_required(login_url='signin')
def address(request):
if request.GET.get('q'):
mycheck = request.GET.get('q')
if mycheck:
if request.method == 'POST':
locality = request.POST['locality']
city = request.POST['city']
state = request.POST['state']
get_address = Address(user=request.user, locality=locality, city=city, state=state)
get_address.save()
messages.success(request, 'Address has been Added.')
return redirect('checkout')
else:
if request.method == 'POST':
locality = request.POST['locality']
city = request.POST['city']
state = request.POST['state']
get_address = Address(user=request.user, locality=locality, city=city, state=state)
get_address.save()
messages.success(request, 'Address has been Added.')
return redirect('profile')
return render(request, 'address.html')
def trash_address(request, id):
del_address = Address.objects.get(id=id)
del_address.delete()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
def plus_cart(request, cart_id):
cp = get_object_or_404(Cart, id=cart_id)
cp.quantity += 1
cp.save()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
def minus_cart(request, cart_id):
cp = get_object_or_404(Cart, id=cart_id)
if cp.quantity == 1:
cp.delete()
else:
cp.quantity -= 1
cp.save()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
@login_required(login_url='signin')
def checkout(request):
if request.method == 'POST':
try:
radioAddress = request.POST['radioAddress']
order_address = Address.objects.get(user=request.user, id=radioAddress)
cart_items = Cart.objects.filter(user=request.user)
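            # Turn each cart row into an Order at the chosen address, then
            # empty the cart.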
for cart in cart_items:
price = cart.quantity * cart.product.price
orders = Order(user=request.user, address=order_address, product=cart.product, quantity=cart.quantity, total_price=price)
orders.save()
cart.delete()
return redirect('orders')
except Exception as error:
print(error)
messages.error(request, 'Add Shipping Address.')
check_address = Address.objects.filter(user=request.user)
total_cart_amount = Cart.objects.filter(user=request.user)
total_amount = decimal.Decimal(0)
shipping_charges = decimal.Decimal(100)
for cart in total_cart_amount:
carts = cart.quantity * cart.product.price
total_amount += carts
context = {
'address':check_address,
'price_amount':total_amount,
'shipping_charges':shipping_charges,
'total_amount':total_amount + shipping_charges
}
return render(request, 'checkout.html', context)
@login_required(login_url='signin')
def orders(request):
all_orders = Order.objects.filter(user=request.user).order_by('-ordered_date')
context = {'all_orders':all_orders}
return render(request, 'orders.html', context)
# Categories
def category_product(request, slug):
category = get_object_or_404(Category, slug=slug)
products = Product.objects.filter(is_active=True, category=category)
context = {'products':products, 'category':category}
return render(request, 'search.html', context)
def search(request):
    search_query = request.GET.get('q', '')
    if not search_query or len(search_query) > 80:
        products = Product.objects.none()
else:
# product_title = Product.objects.filter(is_active=True, title__icontains=search_query)
# product_category = Product.objects.filter(is_active=True, category__icontains=search_query)
# product_short_desc = Product.objects.filter(is_active=True, short_description__icontains=search_query)
# product_long_desc = Product.objects.filter(is_active=True, detail_description__icontains=search_query)
# products = product_title.values_list().union(product_short_desc.values_list(), product_long_desc.values_list())
products = Product.objects.filter(is_active=True, title__icontains=search_query)
# if products.count() == 0:
# messages.warning(request, 'Query Not Found.')
context = {'products':products, 'query':search_query}
return render(request, 'search.html', context)
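# Hedged wiring sketch (assumption: route names like 'signin', 'checkout', 'profile'
# referenced above exist in the project urls.py; the paths here are illustrative):
# from django.urls import path
# from storeApp import views
# urlpatterns = [
#     path('cart/', views.cart, name='cart'),
#     path('cart/add/', views.add_to_cart, name='add-to-cart'),
#     path('checkout/', views.checkout, name='checkout'),
#     path('orders/', views.orders, name='orders'),
#     path('search/', views.search, name='search'),
# ]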
|
digital-era-108/Ecommerce-django
|
storeApp/views.py
|
views.py
|
py
| 7,779 |
python
|
en
|
code
| 0 |
github-code
|
6
|
31280749903
|
import numpy as np, glob, face_recognition, ntpath, pickle, os
from os.path import basename
from shutil import copyfile
def copy_face_image():
for i in glob.glob("data/avatars/*.jpg"):
image = face_recognition.load_image_file(i)
face_locations = face_recognition.face_locations(image)
if face_locations:
print(i)
copyfile(i, "data/human_face/" + ntpath.basename(i))
def encoding():
known_faces = []
name_index = []
for i in glob.glob("data/human_face/*.jpg"):
print(i)
image = face_recognition.load_image_file(i)
        encodings = face_recognition.face_encodings(image)
        if not encodings:
            continue  # no encodable face found; skip instead of crashing on [0]
        known_faces.append(encodings[0])
filename = os.path.splitext(basename(i))[0]
name_index.append(filename)
with open('data/encodings', 'wb') as fp:
pickle.dump(known_faces, fp)
with open('data/index', 'wb') as fp:
pickle.dump(name_index, fp)
def test_encoding():
with open('data/encodings', 'rb') as fp:
known_faces = pickle.load(fp)
with open('data/index', 'rb') as fp:
name_index = pickle.load(fp)
test_file = "data/avatars/4557.jpg"
image = face_recognition.load_image_file(test_file)
face_encoding = face_recognition.face_encodings(image)[0]
face_distances = face_recognition.face_distance(known_faces, face_encoding)
min_index = np.argmin(face_distances)
print(name_index[min_index])
#copy_face_image()
encoding()
test_encoding()
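# Hedged extra (assumption: 0.6 is the commonly used face_recognition tolerance;
# not part of the original script): reject matches whose best distance is too large.
def match_with_tolerance(face_encoding, tolerance=0.6):
    with open('data/encodings', 'rb') as fp:
        known_faces = pickle.load(fp)
    with open('data/index', 'rb') as fp:
        name_index = pickle.load(fp)
    distances = face_recognition.face_distance(known_faces, face_encoding)
    best = int(np.argmin(distances))
    return name_index[best] if distances[best] < tolerance else None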
|
chechiachang/scouter
|
face_recognition/encoding_file_generator.py
|
encoding_file_generator.py
|
py
| 1,509 |
python
|
en
|
code
| 13 |
github-code
|
6
|
35474166745
|
from collections import defaultdict
from warhound import util
class PlayerRoundStats:
    __slots__ = ('raw',)
def __init__(self):
self.raw = None
class RoundSummary:
__slots__ = ('raw', 'list_dict_player_round_stats_by_player_id',
'dict_player_round_stats_by_player_id')
def __init__(self):
self.raw = None
self.list_dict_player_round_stats_by_player_id = util.mk_oil()
self.dict_player_round_stats_by_player_id = {}
class TeamUpdate:
    __slots__ = ('raw',)
def __init__(self):
self.raw = None
class Outcome:
__slots__ = ('raw', 'list_round_summary', 'dict_team_update_by_team_id')
def __init__(self):
self.raw = None
self.list_round_summary = []
self.dict_team_update_by_team_id = {}
def mk_player_round_stats():
return PlayerRoundStats()
def mk_round_summary():
round_summary = RoundSummary()
# one dict for each side...
round_summary.list_dict_player_round_stats_by_player_id.append({})
round_summary.list_dict_player_round_stats_by_player_id.append({})
return round_summary
def mk_team_update():
return TeamUpdate()
def mk_outcome(num_rounds):
outcome = Outcome()
outcome.list_round_summary = \
[mk_round_summary() for i in range(0, num_rounds)]
return outcome
def process_round_stats(round_summary, data, state):
player_id = data['userID']
side = state['dict_side_by_player_id'][player_id]
player_round_stats = mk_player_round_stats()
player_round_stats.raw = data
round_summary.list_dict_player_round_stats_by_player_id[side][player_id] = \
player_round_stats
round_summary.dict_player_round_stats_by_player_id[player_id] = \
player_round_stats
return None
def process_round_finished_event(outcome, data, state):
ordinal = data['round']
round_summary = outcome.list_round_summary[ordinal]
round_summary.raw = data
for obj_stats in data['playerStats']:
process_round_stats(round_summary, obj_stats, state)
return None
def process_match_finished_event(outcome, data, state):
outcome.raw = data
return None
def process_team_update_event(outcome, data, state):
team_id = data['teamID']
team_update = mk_team_update()
team_update.raw = data
outcome.dict_team_update_by_team_id[team_id] = team_update
return None
PROCESSOR_BY_EVENT_TYPE = \
{ 'Structures.RoundFinishedEvent': process_round_finished_event,
'Structures.MatchFinishedEvent': process_match_finished_event,
'com.stunlock.battlerite.team.TeamUpdateEvent':
process_team_update_event }
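# Hedged dispatch sketch (assumption: raw events carry a 'type' key naming the
# structure and a 'data' payload; the real warhound event shape may differ):
def process_event(outcome, event, state):
    processor = PROCESSOR_BY_EVENT_TYPE.get(event.get('type'))
    if processor is not None:
        processor(outcome, event.get('data', {}), state)
    return None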
|
odeumgg/warhound
|
warhound/outcome.py
|
outcome.py
|
py
| 2,834 |
python
|
en
|
code
| 1 |
github-code
|
6
|
25272911980
|
T = int(input())
for _ in range(T):
    a, b = map(int, input().split())
    # last digits of a**b repeat with period dividing 4, so reduce the exponent
    b = b % 4 if b % 4 != 0 else 4
    last = pow(a, b, 10)  # modular pow keeps the intermediate small
    print(10 if last == 0 else last)
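# Hedged self-check of the period-4 fact used above (not part of the judge solution):
# every last digit cycles with period dividing 4, e.g. 2 -> 2, 4, 8, 6, 2, ...
def _last_digit_period_holds():
    return all(pow(a, b, 10) == pow(a, b % 4 if b % 4 != 0 else 4, 10)
               for a in range(1, 101) for b in range(1, 41))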
|
powerticket/algorithm
|
Baekjoon/B3_1009_solved.py
|
B3_1009_solved.py
|
py
| 226 |
python
|
en
|
code
| 0 |
github-code
|
6
|
8316768665
|
from brownie import FundMe
from scripts.helpful_scripts import get_account
def fund():
# Set variable fund_me to the latest deployment of the FundMe contract
fund_me = FundMe[-1]
account = get_account()
entrance_fee = fund_me.getEntranceFee()
    print(f"The current entry fee is {entrance_fee}")
print("Funding")
fund_me.fund({"from": account, "value": entrance_fee})
def withdraw():
fund_me = FundMe[-1]
account = get_account()
fund_me.withdraw({"from": account})
# 0.025000000000000000 ether (at 1 ETH = $2000)
def main():
fund()
withdraw()
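# Hedged usage note (assumption: standard Brownie project layout; pick your network):
#   brownie run scripts/fund_and_withdraw.py --network <network-name>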
|
AgenP/brownie_fund_me
|
scripts/fund_and_withdraw.py
|
fund_and_withdraw.py
|
py
| 618 |
python
|
en
|
code
| 0 |
github-code
|
6
|
27535979388
|
import os
import matplotlib.pyplot as plt
def get_project_path(project_name):
"""
:param project_name: 项目名称,如pythonProject
:return: ******/project_name
"""
# 获取当前所在文件的路径
cur_path = os.path.abspath(os.path.dirname(__file__))
# 获取根目录
return cur_path[:cur_path.find(project_name)] + project_name
def draw_img_groups(img_groups: list, imgs_every_row: int = 8, block: bool = True, show_time: int = 5):
num_groups = len(img_groups)
for i in range(num_groups):
assert img_groups[i].shape[0] >= imgs_every_row
img_groups[i] = img_groups[i].cpu().squeeze(1).detach().numpy()
fig = plt.figure()
gs = fig.add_gridspec(num_groups, imgs_every_row)
for i in range(num_groups):
for j in range(imgs_every_row):
ax = fig.add_subplot(gs[i, j])
ax.imshow(img_groups[i][j], cmap="gray")
ax.axis("off")
plt.tight_layout()
plt.show(block=block)
if not block:
plt.pause(show_time)
plt.close("all")
print(get_project_path(project_name="Defense"))
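if __name__ == "__main__":
    # Hedged usage sketch (assumption: torch is installed and images are [N, 1, H, W]):
    import torch
    draw_img_groups([torch.rand(8, 1, 28, 28), torch.rand(8, 1, 28, 28)], block=False, show_time=2)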
|
fym1057726877/Defense
|
utils.py
|
utils.py
|
py
| 1,115 |
python
|
en
|
code
| 0 |
github-code
|
6
|
40243178863
|
import cv2
from snooker_table import find_snooker_table
from balls import find_balls
from holes import find_holes
# Video processing
def process_video(input_path, output_path):
# Open the video file
video_capture = cv2.VideoCapture(input_path)
# Get video properties
frame_width = int(video_capture.get(cv2.CAP_PROP_FRAME_WIDTH))
frame_height = int(video_capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = int(video_capture.get(cv2.CAP_PROP_FPS))
# Create VideoWriter object to save the processed video
out = cv2.VideoWriter(output_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (frame_width, frame_height))
frame_count = 0
previous_result = {}
balls_expected_location = {}
disappeared_balls = {}
balls_in_pocket = {
"top_left": [],
"top_middle": [],
"top_right": [],
"bottom_left": [],
"bottom_middle": [],
"bottom_right": []
}
while video_capture.isOpened():
frame_count += 1
ret, frame = video_capture.read()
if not ret:
break
# Find snooker table boundaries
snooker_table, x, y, w, h = find_snooker_table(frame.copy())
# Find balls present on table
# RESULT CONTAINS LOCATION OF BALLS
result = {
"red": []
}
balls = find_balls(snooker_table.copy(), result)
        # Balls that existed in the previous frame but are missing from this one
for prev_key in previous_result:
if prev_key != 'white' and prev_key not in result:
disappeared_balls[prev_key] = frame_count
if prev_key == 'red' and len(previous_result[prev_key]) != len(result[prev_key]):
                pass  # TODO: red balls need individual labels; currently indistinguishable
        # Check whether each candidate ball has really disappeared
for disappeared_ball in list(disappeared_balls.keys()):
if disappeared_ball in result.keys():
del disappeared_balls[disappeared_ball]
else:
                if disappeared_balls[disappeared_ball] > 10:  # disappearance threshold in frames
del disappeared_balls[disappeared_ball]
balls_in_pocket[balls_expected_location[disappeared_ball]].append(disappeared_ball)
previous_result = result
# Find holes
holes = find_holes(snooker_table.copy(), balls_in_pocket)
# Final image
final_image = cv2.addWeighted(balls, 0.5, holes, 0.5, 0)
        # Compute possible shots
for ball in result:
white = result.get("white")
other = result.get(ball)
golyok = []
if ball == "red":
golyok = other
else:
golyok.append(other)
for golyo in golyok:
if (white and golyo) and (white != golyo):
white_ball_position = (white.get("x"), white.get("y")) # Example coordinates of the white ball (x, y)
other_ball_position = (golyo.get("x"), golyo.get("y")) # Example coordinates of the other ball (x, y)
# Calculate the line between the two points
line_thickness = 2
cv2.line(final_image, white_ball_position, other_ball_position, (255, 0, 0), line_thickness)
# Calculate the extended line beyond the other ball's position
delta_x = other_ball_position[0] - white_ball_position[0]
delta_y = other_ball_position[1] - white_ball_position[1]
temp_x, temp_y = other_ball_position
                    while 0 <= temp_x < final_image.shape[1] and 0 <= temp_y < final_image.shape[0]:
temp_x += delta_x
temp_y += delta_y
extended_position = (int(temp_x), int(temp_y))
# Draw the extended line
cv2.line(final_image, other_ball_position, extended_position, (0, 255, 0), line_thickness)
########################################################################################################
                    # Where could the ball go if it were struck now?
# Initial position of the line starting from the other ball
current_x, current_y = other_ball_position
# Iterate and extend the line until reaching the edge of the image
while 0 <= current_x < final_image.shape[1] and 0 <= current_y < final_image.shape[0]:
current_x += delta_x
current_y += delta_y
                        if current_x > final_image.shape[1]:
                            current_x = final_image.shape[1]
                        if current_y > final_image.shape[0]:
                            current_y = final_image.shape[0]
# Mark the final point where the line reaches the edge
final_position = (int(current_x), int(current_y))
# Find the intersection point with the image boundary
max_x, max_y = final_image.shape[1], final_image.shape[0]
if delta_x == 0: # Vertical line
final_position = (other_ball_position[0], 0 if delta_y < 0 else max_y - 1)
else:
slope = delta_y / delta_x
if abs(slope) <= max_y / max_x: # Intersects with left or right boundary
final_position = (
0 if delta_x < 0 else max_x - 1, int(other_ball_position[1] - slope * other_ball_position[0]))
else: # Intersects with top or bottom boundary
final_position = (int(other_ball_position[0] - (1 / slope) * (
other_ball_position[1] - (0 if delta_y < 0 else max_y - 1))),
0 if delta_y < 0 else max_y - 1)
cv2.circle(final_image, final_position, 5, (0, 0, 255), -1)
########################################################################################################
                    # Which pocket would the ball be closest to?
top_boundary = int(final_image.shape[0] * 0.2) # 20% of the image height
bottom_boundary = int(final_image.shape[0] * 0.8) # 80% of the image height
left_boundary = int(final_image.shape[1] * 0.333) # 33.3% of the image width
right_boundary = int(final_image.shape[1] * 0.666) # 66.6% of the image width
# Check the position of the marked point relative to the defined boundaries
if final_position[1] < top_boundary:
if final_position[0] < left_boundary:
balls_expected_location[ball] = "top_left"
elif left_boundary <= final_position[0] <= right_boundary:
balls_expected_location[ball] = "top_middle"
else:
balls_expected_location[ball] = "top_right"
elif final_position[1] > bottom_boundary:
if final_position[0] < left_boundary:
balls_expected_location[ball] = "bottom_left"
elif left_boundary <= final_position[0] <= right_boundary:
balls_expected_location[ball] = "bottom_middle"
else:
balls_expected_location[ball] = "bottom_right"
# Write the processed frame to the output video
frame[y:y + h, x:x + w] = final_image
out.write(frame)
# Display the processed frame (optional)
cv2.imshow('Processed Frame', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release video capture and writer
video_capture.release()
out.release()
cv2.destroyAllWindows()
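if __name__ == "__main__":
    # Hedged entry point (assumption: file names are illustrative; adjust the paths):
    process_video("snooker_input.mp4", "snooker_output.mp4")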
|
hirschabel/SZTE-snooker
|
snooker/process.py
|
process.py
|
py
| 8,137 |
python
|
en
|
code
| 0 |
github-code
|
6
|
25294948506
|
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse
from django.urls import reverse
from .forms import *
from .models import *
import psycopg2
from mysite.settings import DATABASES
from psycopg2.extras import RealDictCursor
def db_answer(query):
    connection = cursor = None
    try:
        connection = psycopg2.connect(
            database=DATABASES['default']['NAME'],
            user=DATABASES['default']['USER'],
            host=DATABASES['default']['HOST'],
            port=DATABASES['default']['PORT'],
            password=DATABASES['default']['PASSWORD']
        )
        cursor = connection.cursor(cursor_factory=RealDictCursor)
        cursor.execute(query)
        result = cursor.fetchall()
        return result
    except Exception as err:
        print(err)
        print('Something went wrong!')
    finally:
        # guard the cleanup: connect() itself may have failed
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def index(request):
query1 = """SELECT * FROM employees_worker;"""
all_workers = db_answer(query1)
query2 = """SELECT * FROM employees_category;"""
all_cat = db_answer(query2)
query3 = """SELECT * FROM employees_position;"""
all_pos = db_answer(query3)
context = {
'workers': all_workers,
'cats': all_cat,
'positoin': all_pos,
'title': 'Главная страница',
}
return render(request, 'employees/index.html', context=context)
def adduser(request):
    # add an employee
    if request.method == 'POST':  # validate the data; if invalid, the filled-in form is returned
form = AddWorkerForm(request.POST)
if form.is_valid():
try:
Worker.objects.create(**form.cleaned_data)
return redirect('home')
except:
                form.add_error(None, "Error while adding the entry!")
else:
form = AddWorkerForm()
query2 = """SELECT * FROM employees_category;"""
all_cat = db_answer(query2)
context = {
'cats': all_cat,
'form': form,
'title': 'Добавление сотрудника',
}
return render(request, 'employees/adduser.html', context=context)
def create_user(request, w_id):
    # edit an employee
query2 = """SELECT * FROM employees_category;"""
all_cat = db_answer(query2)
work_up = Worker.objects.get(pk=w_id)
    if request.method == 'POST':  # validate the data; if invalid, the filled-in form is returned
form = AddWorkerForm(request.POST, instance=work_up)
if form.is_valid():
form.save()
return redirect('home')
else:
form = AddWorkerForm(instance=work_up)
context = {
'cats': all_cat,
'form': form,
'title': 'Главная страница',
}
return render(request, 'employees/create_user.html', context=context)
def add_position(request):
    # add a position
    if request.method == 'POST':  # validate the data; if invalid, the filled-in form is returned
form = AddPositionForm(request.POST)
if form.is_valid():
form.save()
return redirect('home')
else:
form = AddPositionForm()
query2 = """SELECT * FROM employees_category;"""
all_cat = db_answer(query2)
context = {
'cats': all_cat,
'form': form,
'title': 'Добавление должности',
}
return render(request, 'employees/add_position.html', context=context)
def show_worker(request, pos_id):
    # list all employees working in a given position
query1 = f"SELECT * FROM employees_worker WHERE pos_id = {pos_id};"
all_workers = db_answer(query1)
query2 = """SELECT * FROM employees_category;"""
all_cat = db_answer(query2)
query3 = f"SELECT * FROM employees_position"
all_pos = db_answer(query3)
context = {
'workers': all_workers,
'cats': all_cat,
'positoin': all_pos,
'title': 'Главная страница',
}
return render(request, 'employees/show_worker.html', context=context)
def show_cats(request, cat_id):
    # list all positions in a category
form = AddPositionForm()
query2 = """SELECT * FROM employees_category;"""
all_cat = db_answer(query2)
query3 = f"SELECT * FROM employees_position WHERE cat_id = {cat_id};"
all_pos = db_answer(query3)
context = {
'cats': all_cat,
'positoin': all_pos,
'form': form,
'title': 'Главная страница',
}
return render(request, 'employees/show_cats.html', context=context)
def create_position(request, pos_id):
    # edit a position
query2 = """SELECT * FROM employees_category;"""
all_cat = db_answer(query2)
query3 = f"SELECT * FROM employees_position WHERE id = {pos_id};"
pos = db_answer(query3)
pos_up = Position.objects.get(pk=pos_id)
    if request.method == 'POST':  # validate the data; if invalid, the filled-in form is returned
form = AddPositionForm(request.POST, instance=pos_up)
if form.is_valid():
form.save()
return redirect('home')
else:
form = AddPositionForm(instance=pos_up)
context = {
'cats': all_cat,
'positoin': pos,
'form': form,
'title': 'Главная страница',
}
return render(request, 'employees/create_position.html', context=context)
def del_worker(request, w_id):
get_worker = Worker.objects.get(pk=w_id)
get_worker.delete()
return redirect(reverse('home'))
def del_position(request, pos_id):
get_pos = Position.objects.get(pk=pos_id)
get_pos.delete()
return redirect(reverse('home'))
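# Hedged variant (not in the original): psycopg2 parameter binding avoids f-string
# interpolation in SQL, e.g.
#   db_answer_safe("SELECT * FROM employees_position WHERE cat_id = %s;", (cat_id,))
def db_answer_safe(query, params=()):
    connection = cursor = None
    try:
        connection = psycopg2.connect(
            database=DATABASES['default']['NAME'],
            user=DATABASES['default']['USER'],
            host=DATABASES['default']['HOST'],
            port=DATABASES['default']['PORT'],
            password=DATABASES['default']['PASSWORD']
        )
        cursor = connection.cursor(cursor_factory=RealDictCursor)
        cursor.execute(query, params)  # parameters are bound server-side
        return cursor.fetchall()
    except Exception as err:
        print(err)
    finally:
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()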
|
Fastsnai1/Employee_log
|
mysite/employees/views.py
|
views.py
|
py
| 6,205 |
python
|
en
|
code
| 0 |
github-code
|
6
|
17034068791
|
import logging, os, json
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework import status
from app.worker.tasks import recommend_options_exe
logger = logging.getLogger(__name__)
@api_view(['POST'])
def get_recommend_options(request, format=None):
    key = os.environ.get('STOCK_API_KEY')
    auth_header = request.META.get('HTTP_AUTHORIZATION')
    if auth_header is None or key != auth_header:
        return Response('', status=status.HTTP_401_UNAUTHORIZED)
if request.method == 'POST':
data = request.data
if 'tickers' in data:
for ticker in data['tickers']:
recommend_options_exe.delay(ticker)
return Response(data, status=status.HTTP_200_OK)
return Response('', status=status.HTTP_403_FORBIDDEN)
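# Hedged invocation sketch (assumption: URL routing maps this view to
# /api/recommend-options/; adjust the path to your urls.py):
#   curl -X POST "http://localhost:8000/api/recommend-options/" \
#        -H "Authorization: $STOCK_API_KEY" -H "Content-Type: application/json" \
#        -d '{"tickers": ["AAPL", "TSLA"]}'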
|
dearvn/tdameritrade-bot
|
app/api/views.py
|
views.py
|
py
| 816 |
python
|
en
|
code
| 1 |
github-code
|
6
|
8829781188
|
from Node import Node
# Node pointers
head = None
second = None
third = None
# Assign data using constructor.
head = Node(10)
second = Node(20)
third = Node(30)
# Assign data using "." operator.
# head.data = 10
# second.data = 20
# third.data = 30
head.next = second # Link first (head) node with second.
second.next = third # Link second node with the third.
third.next = None # Set last Node (tail) as NULL.
print(f"Value in the First Node (head): {head.data}")
print(f"Value in the Second Node: {second.data}")
print(f"Value in the Third Node (tail): {third.data}")
|
drigols/studies
|
modules/algorithms-and-ds/modules/data-structures/linear/lists/src/python/singly-linked-list/driver_insert_using_node_class.py
|
driver_insert_using_node_class.py
|
py
| 593 |
python
|
en
|
code
| 0 |
github-code
|
6
|
35935636198
|
from gpt4all import GPT4All
import asyncio
import websockets
import datetime
print(r'''
$$$$$$\ $$\ $$\ $$$$$$\ $$$$$$\
$$ __$$\ $$ | \__| $$ __$$\ \_$$ _|
$$ / $$ |$$ | $$\ $$$$$$\ $$$$$$\ $$\ $$ / $$ | $$ |
$$$$$$$$ |$$ | $$ |\____$$\ $$ __$$\ $$ | $$$$$$$$ | $$ |
$$ __$$ |$$$$$$ / $$$$$$$ |$$ | \__|$$ | $$ __$$ | $$ |
$$ | $$ |$$ _$$< $$ __$$ |$$ | $$ | $$ | $$ | $$ |
$$ | $$ |$$ | \$$\\$$$$$$$ |$$ | $$ | $$ | $$ |$$$$$$\
\__| \__|\__| \__|\_______|\__| \__| \__| \__|\______|
''')
print('Akari AI Server v1.2b\nhttps://github.com/76836/Akari')
print('[ok]Preparing AI...')
model = GPT4All("gpt4all-falcon-q4_0.gguf", allow_download=True)
prompt_template = 'USER: {0}\nAKARI: '
tokens_added = ""
print('[ok]Server running.')
async def handle_websocket(websocket, path):
try:
while True:
message = await websocket.recv()
uprompt = message
print(f"[in]Received message: {uprompt}")
if (message == "test"):
print('\n[ok]Testing connection.\n')
response = f"Akari AI v1.2b connected"
else:
prompts = [uprompt]
                current_time = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S")  # timezone.utc works on Python < 3.11 too
print('[ok]Generating response... ('+current_time+')')
system_template = '''You are Akari Crimson AI, you have a kind, joking personality. Write detailed quick answers for any question. Give one answer at a time.
(Akari AI Server v1.2b, system time:'''+current_time+''')'''
first_input = system_template + prompt_template.format(prompts[0])
def stop_on_token_callback(token_id, token_string):
global tokens_added
tokens_added = tokens_added + token_string
if "USER:" in tokens_added:
print('[ok]Generation stopped.')
                        tokens_added = ''
return False
# If the string is not found, continue generating tokens
return True
response = model.generate(first_input, max_tokens=512, temp=0.7, top_k=40, top_p=0.4, repeat_penalty=1.99, repeat_last_n=512, n_batch=8, n_predict=None, callback=stop_on_token_callback)
response = response.replace('USER:', '')
await websocket.send(response)
tokens_added = ''
print(f"[out]Sent message: {response}")
except websockets.exceptions.ConnectionClosed:
print("\n[Error]WebSocket connection closed\n")
start_server = websockets.serve(handle_websocket, "localhost", 8765)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
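# Hedged client sketch for manual testing (assumption: server reachable on localhost):
# import asyncio, websockets
# async def ping():
#     async with websockets.connect("ws://localhost:8765") as ws:
#         await ws.send("test")
#         print(await ws.recv())
# asyncio.run(ping())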
|
76836/Akari
|
experimental/server.py
|
server.py
|
py
| 3,098 |
python
|
en
|
code
| 0 |
github-code
|
6
|
17640174097
|
"""
Use blender to convert FBX (T-pose) to BVH file
"""
import os
import bpy
import numpy as np
def get_bvh_name(filename):
filename = filename.split(".")[-2]
return filename + ".bvh"
def main():
    fbx_dir = "./mixamo/fbx/"
    bvh_dir = "./mixamo/bvh/"
    os.makedirs(bvh_dir, exist_ok=True)  # make sure the output directory exists
    for filename in os.listdir(fbx_dir):
fbx_path = os.path.join(fbx_dir, filename)
bvh_path = os.path.join(bvh_dir, get_bvh_name(filename))
bpy.ops.import_scene.fbx(filepath=fbx_path)
action = bpy.data.actions[-1]
frame_start = action.frame_range[0]
frame_end = max(60, action.frame_range[1])
bpy.ops.export_anim.bvh(filepath=bvh_path,
frame_start=int(frame_start),
frame_end=int(frame_end),
root_transform_only=True)
bpy.data.actions.remove(bpy.data.actions[-1])
if __name__ == "__main__":
main()
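# Hedged usage note (assumption: Blender's bundled Python provides bpy):
#   blender --background --python fbx2bvh.py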
|
awkrail/mixamo_preprocessor
|
fbx2bvh.py
|
fbx2bvh.py
|
py
| 935 |
python
|
en
|
code
| 0 |
github-code
|
6
|
1499727625
|
from bs4 import BeautifulSoup
import json
import logging
import requests
from Mongo import client as MongoClient
from Redis import client as RedisClient
WEB = 'https://www.gensh.in/events/promotion-codes'
class RequestErrorException(Exception):
def __init__(self, message, resp):
super().__init__(message)
self.resp = resp
self.message = message
def scrapper():
logging.info("Running scrapper")
try:
codes = _scrapCodes()
except RequestErrorException as e:
logging.warning("%s\nStatus code %s", e.message, e.resp.status_code)
return
    for code in codes:
        if MongoClient.existCode(code["id"]):
            continue
        logging.info("[%s] - New code detected", code["id"])
        RedisClient.sendCode(json.dumps(code))
        logging.info("[%s] - Notification sent", code["id"])
        MongoClient.insertCode(code)
        logging.info("[%s] - Saved on Mongo", code["id"])
logging.info("[%s] - Saved on Mongo",code["id"])
logging.info("Scrapper finished")
def _scrapCodes():
resp = requests.get(WEB)
if not resp:
raise RequestErrorException(resp=resp,message="There was an error on the request")
source = resp.text
soup = BeautifulSoup(source, 'lxml')
table = soup.find('table')
headers = [heading.text for heading in table.find_all('th')]
table_rows = [row for row in table.find_all('tr')]
results = [{headers[index] : cell.text for index,cell in enumerate(row.find_all('td')) } for row in table_rows]
while {} in results:
results.remove({})
results = list(map(_formatter,results))
codes = []
for result in results:
code = {
"id":result["NA"],
"date_added":result["Date Added"],
"rewards":result["Rewards"],
"expired":result["Expired"],
"eu":result["EU"],
"na":result["NA"],
"sea":result["SEA"]
}
codes.append(code)
return codes
def _formatter(result):
result = {x.strip(): v.strip()
for x, v in result.items()}
return result
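# Hedged scheduling sketch (assumption: the scrapper is meant to run periodically):
# import time
# while True:
#     scrapper()
#     time.sleep(3600)  # re-check for new codes every hour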
|
BRAVO68WEB/genshin-notify
|
scrapperCodes/scrapper.py
|
scrapper.py
|
py
| 2,107 |
python
|
en
|
code
| 1 |
github-code
|
6
|
19499795421
|
# -*- coding: utf-8 -*-
class Solution:
def fromMiddles(self, mid_f: int, mid_s: int, s: str) -> (int, int):
dis = start = end = 0
# condition ensures we don't point beyond the beginning of the word
while dis <= mid_f:
pstart = mid_f - dis
pend = mid_s + dis
if len(s) - pend == 0:
break # reached end of word
if s[pstart] == s[pend]:
end = pend
start = pstart
else:
break
dis += 1
return start, end
def longestPalindrome(self, s: str) -> str:
longest = ""
for ix in range(len(s)):
st, en = self.fromMiddles(ix, ix, s)
            # if we're not on the last letter, consider an even palindrome
            if ix + 1 < len(s):
dst, den = self.fromMiddles(ix, ix + 1, s)
if den - dst > en - st:
st = dst
en = den
if en - st + 1 > len(longest):
longest = s[st : en + 1]
return longest
# vim: autoindent tabstop=4 shiftwidth=4 expandtab softtabstop=4
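if __name__ == "__main__":
    # Hedged self-check (assumption: either centre is acceptable for "babad"):
    sol = Solution()
    assert sol.longestPalindrome("babad") in ("bab", "aba")
    assert sol.longestPalindrome("cbbd") == "bb"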
|
michaeldye/mdye-python-samples
|
src/mdye_leetcode/solution_5.py
|
solution_5.py
|
py
| 1,177 |
python
|
en
|
code
| 0 |
github-code
|
6
|
10230270575
|
import numpy as np
from typing import Any
import evals
import evals.elsuite.steganography.steganography as steganography
import evals.metrics
from evals.api import CompletionFn, DummyCompletionFn
from evals.eval import Eval
from evals.record import RecorderBase
class Steganography(Eval):
def __init__(
self,
completion_fns: list[CompletionFn],
samples_jsonl: str,
*args,
prompt_version: str = "scratch",
reconstruction_error_metric: str = "semantic_distance",
**kwargs,
):
super().__init__(completion_fns, *args, **kwargs)
if len(completion_fns) == 1 and isinstance(
completion_fns[0], DummyCompletionFn
):
completion_fn = completion_fns[0]
completion_fns = [completion_fn, completion_fn]
assert (
len(completion_fns) == 2
), f"Steganography requires 2 completion_fns (the 1st is the target model, the 2nd is the monitor model), received {len(completion_fns)}."
self.completion_fns = completion_fns
self.samples_jsonl = samples_jsonl
self.prompt_version = prompt_version
self.reconstruction_error_metric = reconstruction_error_metric
def eval_sample(self, sample: Any, *_):
assert isinstance(sample, dict), "sample must be a dict"
assert "payload_text" in sample, "sample must have a payload_text key"
assert "task_prompt" in sample, "sample must have a task_prompt key"
assert "task_output" in sample, "sample must have a task_output key"
assert isinstance(sample["payload_text"], str), "payload_text must be a string"
assert isinstance(sample["task_prompt"], str), "task_prompt must be a string"
assert isinstance(sample["task_output"], str), "task_output must be a string"
payload = sample["payload_text"]
task_prompt = sample["task_prompt"]
reference_output = sample["task_output"]
results = steganography.run_eval(
task_prompt=task_prompt,
reference_output=reference_output,
payload=payload,
completion_fn=self.completion_fns[0],
monitor_completion_fn=self.completion_fns[1],
prompt_version=self.prompt_version,
metadata=sample,
)
evals.record.record_metrics(
undetected_acc=results["undetected_acc"],
detectability=results["detected"],
reconstruction_error=results[self.reconstruction_error_metric],
character_error_rate_cap1=results["character_error_rate_cap1"],
semantic_distance=results["semantic_distance"],
rule_violated=results["rule_violated"],
)
def run(self, recorder: RecorderBase):
samples = self.get_samples()
self.eval_all_samples(recorder, samples)
metrics = recorder.get_metrics()
return {
"undetected_acc": np.mean([d["undetected_acc"] for d in metrics]),
"detectability": np.mean([d["detectability"] for d in metrics]),
"reconstruction_error": np.mean([d["reconstruction_error"] for d in metrics]),
"character_error_rate_cap1": np.mean([d["character_error_rate_cap1"] for d in metrics]),
"semantic_distance": np.mean([d["semantic_distance"] for d in metrics]),
"rule_violated": np.mean([d["rule_violated"] for d in metrics]),
}
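# Hedged usage note (assumption: the standard evals CLI; model names illustrative —
# the first completion_fn is the target model, the second the monitor):
#   oaieval gpt-4,gpt-3.5-turbo steganography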
|
openai/evals
|
evals/elsuite/steganography/eval.py
|
eval.py
|
py
| 3,437 |
python
|
en
|
code
| 12,495 |
github-code
|
6
|
28985765692
|
import community as community_louvain
import networkx as nx
#read file
graph_file = 'abide_au_2_4132_sparse.txt'
#label_file='4132_regions.txt'
with open(graph_file) as f:
graph = f.readlines()
graph = [x.strip() for x in graph]
G = nx.parse_edgelist(graph, delimiter="\t", nodetype=int)
partition = community_louvain.best_partition(G)
#print(partition)
comm_file=open("vertex_community_au_sparse2_run2.txt","w")
for vertex in sorted(partition):
    comm_file.write(str(vertex)+'\t'+str(partition[vertex])+'\n')
comm_file.close()
num_comm = max(partition.values())+1
vertex_subsets = [set() for i in range(num_comm)]
subgraphs = [set() for i in range(num_comm)]
cut_edges = set()
for key in partition:
vertex_subsets[partition[key]].add(key)
for edge in graph:
u,v=edge.split("\t")
u,v=int(u),int(v)
flag = 0
for comm in range(num_comm):
if u in vertex_subsets[comm] and v in vertex_subsets[comm]:
subgraphs[comm].add(edge)
flag = 1
break
if flag == 0:
cut_edges.add(edge)
#print(cut_edges)
for i in range(num_comm):
print("subgraph ",i," contains ",len(subgraphs[i])," number of edges")
print("Cut edges are - ",len(cut_edges))
subgraph_file=open("subgraphs_au_sparse2_run2.txt","w")
for comm in range(num_comm):
subgraph_file.write(str(comm)+'\n')
subgraph_file.write(str(len(vertex_subsets[comm]))+'\t'+str(len(subgraphs[comm]))+'\n')
#subgraph_file.write(sorted(vertex_subsets[comm]), key = lambda x: (len (x), x))
i = 0
vertid = dict()
for vertex in sorted(vertex_subsets[comm]):
vertid[vertex] = i
i += 1
subgraph_file.write(str(vertex)+'\n')
#subgraph_file.write(sorted(subgraphs[comm]),key = lambda x: (len (x), x))
edgelist = [[0 for k in range(2)] for m in range(len(subgraphs[comm]))]
j=0
for edge in sorted(subgraphs[comm]):
u,v=edge.split("\t")
u,v=int(u),int(v)
edgelist[j][0],edgelist[j][1] = vertid[u],vertid[v]
j += 1
for l in sorted(edgelist,key=lambda x: (x[0],x[1])):
subgraph_file.write(str(l[0])+'\t'+str(l[1])+'\t'+str(1)+'\n')
subgraph_file.close()
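# Hedged extra (assumption: python-louvain's modularity() as a quick quality check):
print("partition modularity:", community_louvain.modularity(partition, G))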
|
chandrashekar-cds/Graph-Coarsening
|
louvain_subgraphs_label.py
|
louvain_subgraphs_label.py
|
py
| 2,184 |
python
|
en
|
code
| 0 |
github-code
|
6
|
21053864803
|
# Import the necessary libraries
import PySimpleGUI as sg
import qrcode
# Set the theme for the UI
sg.theme('GreenMono')
# Define the layout for the app
layout = [ [sg.Text('Enter Text: ', font=('Helvetica', 12, 'bold')), sg.InputText(font=('Helvetica', 12), size=(30,1))],
[sg.Button('Create', font=('Helvetica', 12), button_color=('white', '#007F00')), sg.Button('Exit', font=('Helvetica', 12), button_color=('white', 'firebrick'))],
[sg.Image(key='-IMAGE-', size=(200, 150))]
]
# Create the window
window = sg.Window('QR Code Generator', layout)
# Event loop for the app
while True:
# Read events and values from the window
event, values = window.read()
# If the Exit button or window is closed, exit the app
if event in (sg.WIN_CLOSED, 'Exit'):
break
# If the Create button is clicked, generate the QR code image
if event == 'Create':
# Get the text input from the user
data = values[0]
# If the text input is not empty, generate the QR code
if data:
# Generate the QR code image
img = qrcode.make(data)
# Save the QR code image to a file
img.save('qrcode.png')
# Update the image in the UI
window['-IMAGE-'].update(filename='qrcode.png')
# Close the window and exit the app
window.close()
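# Hedged dependency note (assumption: the 'qrcode' package saves PNGs via Pillow):
#   pip install PySimpleGUI "qrcode[pil]"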
|
haariswaqas/Project2
|
QR Code Generator.py
|
QR Code Generator.py
|
py
| 1,401 |
python
|
en
|
code
| 0 |
github-code
|
6
|
36572760050
|
import random
import colorgram
from turtle import Turtle, Screen
colors = colorgram.extract("./example.jpg", 10)
list_colors = []
for color in colors:
current_color = color.rgb
color_tuple = (current_color[0], current_color[1], current_color[2])
list_colors.append(color_tuple)
porto = Turtle()
porto.penup()
porto.hideturtle()
porto.speed('fast')
screen = Screen()
screen.colormode(255)
number_dots = 100
porto.seth(225)
porto.fd(300)
porto.seth(0)
for dot in range(1, number_dots + 1):
porto.dot(20, random.choice(list_colors))
porto.fd(50)
if dot % 10 == 0:
porto.seth(90)
porto.fd(50)
porto.seth(180)
porto.fd(500)
porto.seth(0)
screen.exitonclick()
|
porto-o/Python_projects
|
18. hirst-painting/main.py
|
main.py
|
py
| 725 |
python
|
en
|
code
| 1 |
github-code
|
6
|
18537329449
|
# prob_link: https://www.codingninjas.com/codestudio/problems/trapping-rain-water_8230693?challengeSlug=striver-sde-challenge&leftPanelTab=0
from os import *
from sys import *
from collections import *
from math import *
def getTrappedWater(height, n):
# Write your code here.
n = len(height)
pre = [0]*n
suff = [0]*n
for i in range(len(height)):
if i==0:
pre[i]=height[0]
continue
pre[i]=max(pre[i-1],height[i])
for i in range(len(height)-1,-1,-1):
if i==n-1:
suff[i] = height[-1]
continue
suff[i]=max(suff[i+1],height[i])
ans = 0
#suff = suff[::-1]
for i in range(1,len(suff)-1):
mini = min(suff[i],pre[i])
ans += mini - height[i]
return ans
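if __name__ == "__main__":
    # Hedged self-check (classic example array; expected trapped water is 6):
    assert getTrappedWater([0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1], 12) == 6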
|
Red-Pillow/Strivers-SDE-Sheet-Challenge
|
P40_Trapping_Rain_Water.py
|
P40_Trapping_Rain_Water.py
|
py
| 729 |
python
|
en
|
code
| 0 |
github-code
|
6
|
9816344184
|
# Nearest stars to Earth
# star1 = 'Sol'
# star2 = 'Alpha Centauri'
# star3 = 'Barnard'
# star4 = 'Wolf 359'
stars = [
    "Sol",
    "Alpha Centauri",
    "Barnard",
    "Wolf 359"
]
print(stars[3])
# # Highest peak on each tectonic plate
# Antarctic = 'Vinson'
# Australian = 'Puncak Jaya'
# Eurasian = 'Everest'
# North_American = 'Denali'
# Pacific = 'Mauna Kea'
# South_American = 'Aconcagua'
peaks = {
    "Antarctic": "Vinson",
    "Australian": "Puncak Jaya",
    "Eurasian": "Everest",
    "North_American": "Denali",
    "Pacific": "Mauna Kea",
    "South_American": "Aconcagua"
}
print(peaks["Pacific"])
|
fouad963/linked-in
|
Exercise FilesProgramming Foundations Beyond the Fundamentals/Ch01/01_06/begin/collections.py
|
collections.py
|
py
| 602 |
python
|
en
|
code
| 0 |
github-code
|
6
|
35743712704
|
import sys
import read_write as rw
import numpy as np
import scipy.sparse
from MatrixFactorization import MatrixFactorization
if __name__ == '__main__':
finput_dataset = sys.argv[1]
finput_K = (int)(sys.argv[2])
iu_matrix_train_path = "../../Data/" + finput_dataset + "/iu_sparse_matrix_train.npz"
iu_matrix_test_path = "../../Data/" + finput_dataset + "/iu_sparse_matrix_test.npz"
train_item_id_path = "../../Data/" + finput_dataset + "/train_item_id"
test_item_id_path = "../../Data/" + finput_dataset + "/test_item_id"
item_sim_matrix_path = "../../Data/" + finput_dataset + "/item_sim_matrix" # pass
ui_matrix_train = scipy.sparse.load_npz(iu_matrix_train_path).T
ui_matrix_test = scipy.sparse.load_npz(iu_matrix_test_path).T
ui_matrix = scipy.sparse.csr_matrix(np.hstack((ui_matrix_train.toarray(), np.zeros(ui_matrix_test.shape))))
train_item_id = rw.readffile(train_item_id_path)
test_item_id = rw.readffile(test_item_id_path)
item_sim_matrix = rw.readffile(item_sim_matrix_path)
# Computing Score for user (Score = [user number, new item number])
Score = (ui_matrix_train * item_sim_matrix.loc[train_item_id, test_item_id]) / \
((ui_matrix_train != 0) * item_sim_matrix.loc[train_item_id, test_item_id])
# Active Learning
train_item_num = len(train_item_id)
ui_matrix = ui_matrix.tolil()
ui_matrix_test = ui_matrix_test.tolil()
for i in range(len(test_item_id)):
ind = np.argsort(-Score[:, i])
if finput_K < ind.shape[0]:
topK = ind[:(finput_K+1)]
else:
topK = ind
ui_matrix[topK, i+train_item_num] = ui_matrix_test[topK, i]
ui_matrix_test[topK, i] = 0
# Matrix Factorization
nonzero = scipy.sparse.find(ui_matrix)
train_lst = []
for uid, itemid, rating in zip(nonzero[0], nonzero[1], nonzero[2]):
train_lst.append((uid, itemid, float(rating)))
MF = MatrixFactorization(usernum=ui_matrix.shape[0], itemnum=ui_matrix.shape[1])
try:
user_profile, item_profile = MF.matrix_factorization(train_lst)
    except Exception:
MF.end()
MF = MatrixFactorization()
user_profile, item_profile = MF.matrix_factorization(train_lst)
pred_rating = np.dot(user_profile, item_profile[train_item_num:, :].T)
nonzero_num = ui_matrix_test.getnnz()
ui_matrix_test_arr = ui_matrix_test.toarray()
RMSE = np.sum(((ui_matrix_test_arr != 0)*(pred_rating - ui_matrix_test_arr))**2 / nonzero_num)**0.5
print("RMSE: %.4f"%RMSE)
MF.end()
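# Hedged usage note (assumption: dataset folder lives under ../../Data and K is an int):
#   python content_based_active_learning.py <dataset-name> <K>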
|
clamli/Dissertation
|
Baselines/Content-based Active Learning/content_based_active_learning.py
|
content_based_active_learning.py
|
py
| 2,632 |
python
|
en
|
code
| 28 |
github-code
|
6
|
18997085490
|
import torch.nn as nn
from efficientnet_pytorch import EfficientNet
class EfficientNetCustom(nn.Module):
def __init__(self, model_name, in_channels, num_classes,
load_pretrained_weights=True, train_only_last_layer=False):
super(EfficientNetCustom, self).__init__()
self.model_name = model_name
self.in_channels = in_channels
self.num_classes = num_classes
# self.image_size = EfficientNet.get_image_size(self.model_name)
self.load_pretrained_weights = load_pretrained_weights
self.train_only_last_layer = train_only_last_layer
if self.load_pretrained_weights:
self.features = EfficientNet.from_pretrained(self.model_name, in_channels=self.in_channels)
else:
self.features = EfficientNet.from_name(self.model_name, in_channels=self.in_channels)
if self.train_only_last_layer:
print('Training only last layer...')
for param in self.features.parameters():
param.requires_grad = False
in_ftrs = self.features._fc.in_features
self.features._fc = nn.Linear(in_ftrs, self.num_classes)
# self.features._fc.requires_grad = True
def forward(self, inputs):
x = self.features(inputs)
return x
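if __name__ == "__main__":
    # Hedged usage sketch (assumption: efficientnet_pytorch is installed; pretrained
    # weight download is skipped by constructing via from_name):
    import torch
    model = EfficientNetCustom("efficientnet-b0", in_channels=3, num_classes=2,
                               load_pretrained_weights=False)
    out = model(torch.rand(1, 3, 224, 224))
    print(out.shape)  # expected: torch.Size([1, 2])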
|
sanjeebSubedi/cats-dogs-efficientnet
|
efficientNetCustom.py
|
efficientNetCustom.py
|
py
| 1,204 |
python
|
en
|
code
| 0 |
github-code
|
6
|
74743366586
|
from decimal import Decimal, setcontext, ExtendedContext
setcontext(ExtendedContext)
precision = Decimal('0.000001')
class LocationStore(object):
def __init__(self):
self.locations = {}
def add(self, latitude, longitude, name, url=None, primary=False):
latitude = makedecimal(latitude).quantize(precision).normalize()
longitude = makedecimal(longitude).quantize(precision).normalize()
        for location, data in self.locations.items():
(other_lat, other_lon) = location
if latitude == other_lat and longitude == other_lon:
if url is not None:
data['url'] = url
if primary is True:
data['primary'] = True
break
else:
#new location
self.locations[(latitude, longitude)] = {'name': name,
'primary': primary}
if url is not None:
self.locations[(latitude, longitude)]['url'] = url
def to_list(self):
out = []
        for location, data in self.locations.items():
(data['latitude'], data['longitude']) = location
out.append(data)
return out
def makedecimal(value):
if isinstance(value, float):
return Decimal(str(value))
else:
return Decimal(value)
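# Hedged usage sketch (assumption: coordinates that agree after 6-decimal
# quantization merge into a single entry):
# store = LocationStore()
# store.add(45.5, -73.5667, "Montreal", primary=True)
# store.add("45.500000", "-73.566700", "Montreal (duplicate)")
# print(store.to_list())  # one merged location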
|
kurtraschke/cadors-parse
|
src/cadorsfeed/cadorslib/locations.py
|
locations.py
|
py
| 1,387 |
python
|
en
|
code
| 1 |
github-code
|
6
|
5123241016
|
"""
Requests モジュールによりリモートファイルを読み込むサンプル
事前にRequestsモジュールをインストールしましょう
# pip install requests
"""
import requests
url = 'https://it-engineer-lab.com/feed'
try:
r = requests.get(url, timeout=10.0)
print(r.text)
except requests.exceptions.RequestException as err:
print(err)
# Download (read + save locally)
# Example that downloads the feed and saves it under the file name rss.xml
try:
r = requests.get(url, timeout=10.0)
with open('rss.xml', mode='w') as f:
f.write(r.text)
except requests.exceptions.RequestException as err:
print(err)
|
toksan/python3_study
|
network/get_by_requests.py
|
get_by_requests.py
|
py
| 690 |
python
|
ja
|
code
| 2 |
github-code
|
6
|
1948037648
|
from django.contrib import admin
from django.db.models import QuerySet
from django.db.models.expressions import RawSQL
from django.forms import ModelForm
from django.urls import reverse
from django.utils.safestring import mark_safe
from tree.models import Category
from django.utils.html import format_html
from django import forms
from treebeard.admin import TreeAdmin
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
def get_queryset(self, request):
qs: QuerySet = super(admin.ModelAdmin, self).get_queryset(request)
return qs \
.annotate(parent=RawSQL("""
select id from tree_category tc where tc.depth="tree_category"."depth"-1 and "tree_category"."path" like tc.path || '%%'
""", []
))
list_display = (
'id',
'path',
'depth',
'numchild',
'name',
'parent',
)
class CategoryForm(ModelForm):
parent_link = forms.BooleanField(required=False, disabled=True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
url = reverse('admin:tree_category_change', args=[self.instance.parent])
self.fields['parent_link'].label = format_html('<a href="{}">Link to Parent</a>', url)
class Meta:
model = Category
fields = '__all__'
form = CategoryForm
def parent(self, obj):
# result = obj.grade_avg
if obj.parent is None:
return format_html("<b><i>{}</i></b>", obj.parent)
url = reverse('admin:tree_category_change', args=[obj.parent])
return format_html('<a href="{}"> Parent</a>', url)
# list_select_related = ()
# raw_id_fields = ("id",)
|
Vulwsztyn/django_treebeard_admin
|
tree/admin.py
|
admin.py
|
py
| 1,783 |
python
|
en
|
code
| 0 |
github-code
|
6
|
72340847227
|
# Tower of Hanoi
# code by Akshat Aryan
def tower(n, src, aux, dest):
if n == 1:
print("Move Disk from", src, "to", dest)
else:
tower(n - 1, src, dest, aux)
print("Move Disk from", src, "to", dest)
tower(n - 1, aux, src, dest)
if(__name__ == "__main__"):
n = int(input("Enter the number of disks:"))
print("Follow these steps:")
tower(n, "Source", "Auxiliary", "Destination")
|
crazydj8/DesignAlgorithms
|
Misc. Algos/Recursive_algos/towerofhanoi.py
|
towerofhanoi.py
|
py
| 437 |
python
|
en
|
code
| 1 |
github-code
|
6
|
27259334450
|
"""We are the captains of our ships, and we stay 'till the end. We see our stories through.
"""
"""1059. All Paths from Source Lead to Destination
"""
class Solution:
def leadsToDestination(self, n, edges, s, d):
visited = [0] * n
graph = [[] for _ in range(n)]
def dfs(i):
if len(graph[i]) == 0:
return i == d
if visited[i] == 1:
return True
if visited[i] == -1:
return False
visited[i] = -1
for neighbour in graph[i]:
if not dfs(neighbour):
return False
visited[i] = 1
return True
for u, v in edges:
graph[u].append(v)
        return dfs(s)
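if __name__ == "__main__":
    # Hedged self-check (cases adapted from the problem statement):
    assert Solution().leadsToDestination(4, [[0, 1], [0, 2], [1, 3], [2, 3]], 0, 3) is True
    assert Solution().leadsToDestination(4, [[0, 1], [0, 3], [1, 2], [2, 1]], 0, 3) is False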
|
asperaa/back_to_grind
|
Graphs/1059. All Paths from Source Lead to Destination.py
|
1059. All Paths from Source Lead to Destination.py
|
py
| 756 |
python
|
en
|
code
| 1 |
github-code
|
6
|
6323414106
|
from contextlib import suppress
import random
import asyncio
from typing import *
import traceback
import hikari
from hikari import Embed
import lightbulb
from lightbulb import events, errors
from lightbulb.context import Context
from core import Inu
from utils.language import Human
from .help import OutsideHelp
from core import getLogger, BotResponseError, Inu, InteractionContext
log = getLogger("Error Handler")
pl = lightbulb.Plugin("Error Handler")
bot: Inu
ERROR_JOKES = [
"Wait, there is a difference between beta and production?",
"Seems like someone was to lazy to test me -- _again_",
"Y'know: _my_ ordinary life is generating errors",
"You expected me to work properly? Oh please ...",
(
"Y'know I can smell your disappointment. It's right about here: ```\n"
"good bad\n"
" |--------------------|\n"
" ^\n```"
)
]
async def on_exception(event: hikari.ExceptionEvent):
# not user related error
try:
log.error(f"{''.join(traceback.format_exception(event.exception))}")
except Exception:
log.critical(traceback.format_exc())
@pl.listener(events.CommandErrorEvent)
async def on_error(event: events.CommandErrorEvent):
"""
"""
try:
ctx: Context | None = event.context
if not isinstance(ctx, Context):
log.debug(f"Exception uncaught: {event.__class__}")
return
error = event.exception
async def message_dialog(error_embed: hikari.Embed):
error_id = f"{bot.restart_num}-{bot.id_creator.create_id()}-{bot.me.username[0]}"
component=(
hikari.impl.MessageActionRowBuilder()
.add_interactive_button(
hikari.ButtonStyle.PRIMARY,
"error_send_dev_silent",
label="🍭 Send report silently"
)
.add_interactive_button(
hikari.ButtonStyle.PRIMARY,
"error_send_dev",
label="🍭 Add note & send"
)
)
try:
message = await (await ctx.respond(
embed=error_embed,
component=component
)).message()
except Exception:
message = await bot.rest.create_message(
ctx.channel_id,
embed=error_embed,
component=component
)
def check(event: hikari.ReactionAddEvent):
if event.user_id != bot.me.id and event.message_id == message.id:
return True
return False
custom_id, _, interaction = await bot.wait_for_interaction(
custom_ids=["error_send_dev", "error_show", "error_send_dev_silent"],
message_id=message.id,
user_id=ctx.user.id
)
# await interaction.delete_message(message)
embeds: List[Embed] = [Embed(title=f"Bug #{error_id}", description=str(error)[:2000])]
embeds[0].set_author(
name=f'Invoked by: {ctx.user.username}',
icon=ctx.author.avatar_url
)
embeds[0].add_field(
"invoked with",
value=(
f"Command: {ctx.invoked_with}\n"
"\n".join([f"`{k}`: ```\n{v}```" for k, v in ctx.raw_options.items()])
)[:1000]
)
nonlocal event
traceback_list = traceback.format_exception(*event.exc_info)
if len(traceback_list) > 0:
log.warning(str("\n".join(traceback_list)))
error_embed.add_field(
name=f'{str(error.__class__)[8:-2]}',
value=f'Error:\n{error}'[:1024],
)
i = 0
for index, tb in enumerate(traceback_list):
if embeds[-1].total_length() > 6000:
field = embeds[-1]._fields.pop(-1)
embeds.append(Embed(description=f"Bug #{error_id}"))
embeds[-1]._fields.append(field)
i = 0
if i % 20 == 0 and i != 0:
embeds.append(Embed(description=f"Bug #{error_id}"))
embeds[-1].add_field(
name=f'Traceback - layer {index + 1}',
value=f'```python\n{Human.short_text_from_center(tb, 1000)}```',
inline=False
)
i += 1
messages: List[List[Embed]] = [[]]
message_len = 0
for e in embeds:
for field in e._fields:
if not field.value:
field.value = "-"
if message_len == 0:
messages[-1].append(e)
message_len += e.total_length()
else:
if message_len + e.total_length() > 6000:
messages.append([e])
message_len = e.total_length()
else:
messages[-1].append(e)
message_len += e.total_length()
            kwargs: Dict[str, Any] = {}  # extra kwargs for the first report message only
answer = ""
if custom_id == "error_show":
await message.edit(embeds=embeds)
if custom_id == "error_send_dev":
try:
answer, interaction, event = await bot.shortcuts.ask_with_modal(
f"Bug report",
question_s="Do you have additional information?",
interaction=interaction,
pre_value_s="/",
)
except asyncio.TimeoutError:
answer = "/"
if answer == "/":
answer = ""
kwargs["content"] = f"**{40*'#'}\nBug #{error_id}\n{40*'#'}**\n\n\n{Human.short_text(answer, 1930)}"
del kwargs["embeds"]
for i, embeds in enumerate(messages):
if i == 0:
message = await bot.rest.create_message(
channel=bot.conf.bot.bug_channel_id,
embeds=embeds,
**kwargs
)
else:
message = await bot.rest.create_message(
channel=bot.conf.bot.bug_channel_id,
embeds=embeds,
)
if interaction:
with suppress():
await interaction.create_initial_response(
hikari.ResponseType.MESSAGE_CREATE,
content=(
f"**Bug #{error_id}** has been reported.\n"
f"You can find the bug report [here]({message.make_link(message.guild_id)})\n"
f"If you can't go to this message, or need additional help,\n"
f"consider to join the [help server]({bot.conf.bot.guild_invite_url})"
),
flags=hikari.MessageFlag.EPHEMERAL,
)
return
# errors which will be handled also without prefix
if isinstance(error, errors.NotEnoughArguments):
return await OutsideHelp.search(
obj=ctx.invoked_with,
ctx=ctx,
message=(
f"to use the `{ctx.invoked.qualname}` command, "
f"I need {Human.list_([o.name for o in error.missing_options], '`')} to use it"
),
only_one_entry=True,
)
elif isinstance(error, errors.CommandIsOnCooldown):
return await ctx.respond(
f"You have used `{ctx.invoked.qualname}` to often. Retry it in `{error.retry_after:.01f} seconds` again"
)
elif isinstance(error, errors.ConverterFailure):
return await OutsideHelp.search(
obj=ctx.invoked_with,
ctx=ctx,
message=(
f"the option `{error.option.name}` has to be {Human.type_(error.option.arg_type, True)}"
),
only_one_entry=True,
)
elif isinstance(error, errors.MissingRequiredPermission):
return await ctx.respond(
f"You need the `{error.missing_perms.name}` permission, to use `{ctx.invoked_with}`",
flags=hikari.MessageFlag.EPHEMERAL,
)
elif isinstance(error, errors.CheckFailure):
fails = set(
str(error)
.replace("Multiple checks failed: ","")
.replace("This command", f"`{ctx.invoked_with}`")
.split(", ")
)
if len(fails) > 1:
str_fails = [f"{i+1}: {e}"
for i, e in enumerate(fails)
]
return await ctx.respond(
"\n".join(fails)
)
else:
return await ctx.respond(fails.pop())
elif isinstance(error, errors.CommandInvocationError) and isinstance(error.original, BotResponseError):
try:
return await ctx.respond(**error.original.kwargs)
except hikari.BadRequestError:
# interaction probably already acknowledged
# TODO: implement Error handling into InuContext
ctx._responded = True
return await ctx.respond(**error.original.kwargs)
# errors which will only be handled, if the command was invoked with a prefix
if not ctx.prefix:
return # log.debug(f"Suppress error of type: {error.__class__.__name__}")
if isinstance(error, errors.CommandNotFound):
return await OutsideHelp.search(
obj=error.invoked_with,
ctx=ctx,
message=f"There is no command called `{error.invoked_with}`\nMaybe you mean one from the following ones?"
)
else:
error_embed = hikari.Embed()
error_embed.title = "Oh no! A bug occurred"
error_embed.description = random.choice(ERROR_JOKES)
with suppress(hikari.ForbiddenError):
await message_dialog(error_embed)
except Exception:
log.critical(traceback.format_exc())
def load(inu: Inu):
global bot
bot = inu
@bot.listen(hikari.events.ExceptionEvent)
async def on_error(event: hikari.events.ExceptionEvent) -> None:
try:
log.error(f"{''.join(traceback.format_exception(event.exception))}")
except Exception:
log.critical(traceback.format_exc())
inu.add_plugin(pl)
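# Hedged usage note (assumption: lightbulb-style extension loading by dotted path):
#   bot.load_extensions("ext.commands.errors")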
|
zp33dy/inu
|
inu/ext/commands/errors.py
|
errors.py
|
py
| 10,996 |
python
|
en
|
code
| 1 |
github-code
|
6
|
14952780826
|
from temporalio.client import Client
from temporalio.worker import Worker
from temporal_test.activity import say_hello_activity
from temporal_test.config import Config
from temporal_test.workflows import TestWorkflow
import asyncio
import os
async def main():
temporal_host = os.getenv("TEMPORAL_ADDRESS", "127.0.0.1:7233")
print(f"connecting to temporal at {temporal_host}")
client = await Client.connect(temporal_host, namespace="default")
worker = Worker(
client,
task_queue=Config.queue,
workflows=[TestWorkflow],
activities=[say_hello_activity]
)
await worker.run()
print(__name__)
if __name__ == "temporal_test.worker":
print("starting worker")
asyncio.run(main())
if __name__ == "__main__":
print("starting worker")
asyncio.run(main())
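# Hedged client sketch for starting the workflow (assumption: TestWorkflow exposes a
# 'run' entry point and Config.queue matches the worker above):
# async def start():
#     client = await Client.connect("127.0.0.1:7233", namespace="default")
#     await client.start_workflow(TestWorkflow.run, id="test-wf-1", task_queue=Config.queue)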
|
qadiludmer/temporal-test
|
temporal_test/worker.py
|
worker.py
|
py
| 824 |
python
|
en
|
code
| 0 |
github-code
|
6
|
4495970726
|
from CategoriasProdutos import CateProd
from Compras import Compras
#from MenuOpcoes import MenuOpcoes
Co = Compras()
CP = CateProd()
#Mo = MenuOpcoes()
class ContEstoque:
def __init__(self):
pass
def LimiteEstoque(self,LisProd):
limite = 0
if LisProd == 'vazio':
return 'vazio'
else:
for x in range(len(LisProd)):
if int(LisProd[x][4]) <= int(LisProd[x][5]):
print("%s - CODIGO %s - tem %s pecas disponiveis" % (LisProd[x][1],LisProd[x][0],LisProd[x][4]))
else:
if int(LisProd[x][4]) > int(LisProd[x][5]):
limite += 1
if limite == len(LisProd):
return 0
def Menu(self):
Res = ContEstoque.LimiteEstoque(self,CP.DadosProdutos())
if Res == 'vazio':
print("SEJA BEM VINDO AO CONTROLE DA LOJA, INICIE ADICIONANDO FUNCIONARIOS, DEPOIS CLIENTES E EM SEGUIDA CATEGORIAS E PRODUTOS\n")
#Mo.MostrarMenu()
elif Res == 0:
return("Estoque normal")
        else:
            Co.MenuCompras()
Contro = ContEstoque()
Contro.Menu()
|
Ander20n/Codigos-Faculdade
|
Projeto IP/ControleEstoque.py
|
ControleEstoque.py
|
py
| 1,224 |
python
|
pt
|
code
| 0 |
github-code
|
6
|
71971281789
|
from kubeflow import fairing
import os
import sys
GCS_PROJECT_ID = fairing.cloud.gcp.guess_project_name()
DOCKER_REGISTRY = 'gcr.io/{}'.format(GCS_PROJECT_ID)
NOTEBOOK_PATH = os.path.join(os.path.dirname(__file__), 'test_notebook.ipynb')
def run_full_notebook_submission(capsys, notebook_file, expected_result,
deployer='job', builder='docker',
namespace='default'):
py_version = ".".join([str(x) for x in sys.version_info[0:3]])
base_image = 'python:{}'.format(py_version)
fairing.config.set_builder(
builder, base_image=base_image, registry=DOCKER_REGISTRY)
fairing.config.set_deployer(deployer, namespace=namespace)
requirements_file = os.path.relpath(
os.path.join(os.path.dirname(__file__), 'requirements.txt'))
fairing.config.set_preprocessor('full_notebook', notebook_file=notebook_file,
output_map={requirements_file: '/app/requirements.txt'})
fairing.config.run()
captured = capsys.readouterr()
assert expected_result in captured.out
def test_full_notebook_job(capsys):
run_full_notebook_submission(capsys, NOTEBOOK_PATH, 'Hello World',
deployer='job')
def test_full_notebook_tfjob(capsys):
run_full_notebook_submission(capsys, NOTEBOOK_PATH, 'Hello World',
deployer='tfjob', namespace='kubeflow-fairing')
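# Hedged usage note (assumption: GCP credentials and a reachable container registry
# are configured for the guessed project):
#   pytest tests/integration/common/test_full_notebook.py -s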
|
kubeflow/fairing
|
tests/integration/common/test_full_notebook.py
|
test_full_notebook.py
|
py
| 1,452 |
python
|
en
|
code
| 336 |
github-code
|
6
|
10640003312
|
import requests
BASE_URL = "https://api.github.com"
def confirm_github():
"""Confirm Github is up and running"""
url = BASE_URL
r = requests.get(url)
if r.status_code == 200:
# print "status code:", r.status_code, "(github is working)"
return True
else:
# print "github is down"
return False
def check_member(org_name, member_name):
"""Check if a user is a member of a given org"""
#Based on validity of either org or member, API response has two status responses - 204 or 404
url = BASE_URL+"/orgs/%s/public_members/%s" % (org_name, member_name)
r = requests.get(url)
if r.status_code == 204:
# print member_name, "is a member of", org_name
return True
elif r.status_code == 404:
# print member_name, "is not a member of", org_name
return False
def create_markdown(comment, mode, repo_context):
    """Validate whether markdown can be rendered"""
    # The endpoint answers '400: Bad Request' when an incorrect POST body is sent
    comment_data = {
        "text": comment,
        "mode": mode,
        "context": repo_context
    }
    r = requests.post(BASE_URL + "/markdown", json=comment_data)
    if r.status_code == 200:
        # print "markdown doc rendered"
        return True
    return False
def get_repo_branches(owner, repo):
    """Return branch info for a given repo by an owner"""
    # The endpoint delivers an error message if either owner or repo is invalid
    url = BASE_URL + "/repos/%s/%s/branches" % (owner, repo)
    r = requests.get(url)
    repo_branches = r.json()
    try:
        # print "branches: ", repo_branches
        return repo_branches[0]["commit"]["sha"]
    except (KeyError, IndexError, TypeError):
        return repo_branches.get("message")
|
smithers1221/replicatedcc_python
|
githubapi.py
|
githubapi.py
|
py
| 1,833 |
python
|
en
|
code
| 0 |
github-code
|
6
|
26038294916
|
import inspect
import json
import sys
from pathlib import Path
import pytest
from _pytest import fixtures
from _pytest.compat import get_real_func
def get_func_path(func):
real_func = get_real_func(func)
return inspect.getfile(real_func)
def get_fixturedef(fixture_request, name):
fixturedef = fixture_request._fixture_defs.get(name)
if fixturedef:
return fixturedef
try:
return fixture_request._getnextfixturedef(name)
except fixtures.FixtureLookupError:
return None
def process_fixtures(item):
lockfile_definitions = []
fixture_request = fixtures.FixtureRequest(item, _ispytest=True)
for fixture_name in fixture_request.fixturenames:
fixture_def = get_fixturedef(fixture_request, fixture_name)
if not fixture_def:
continue
func = fixture_def.func
        annotations = getattr(func, "__annotations__", None)
if not annotations or annotations.get("return") != "JVMLockfileFixtureDefinition":
continue
# Note: We just invoke the fixture_def function assuming it takes no arguments. The other two
        # ways of obtaining the fixture value cause errors. I have left them here commented-out as an example
# of what failed:
# lockfile_definition = fixture_request.getfixturevalue(fixture_name)
# lockfile_definition = fixture_def.execute(request=request)
try:
lockfile_definition = func()
except Exception as err:
raise ValueError(
f"Exception while getting lockfile definition (file {item.path}): {err}"
)
if lockfile_definition.__class__.__name__ != "JVMLockfileFixtureDefinition":
continue
cwd = Path.cwd()
func_path = Path(get_func_path(func)).relative_to(cwd)
lockfile_definitions.append(
{
"lockfile_rel_path": str(lockfile_definition.lockfile_rel_path),
"requirements": [c.to_coord_str() for c in lockfile_definition.requirements],
"test_file_path": str(func_path),
}
)
return lockfile_definitions
class CollectionPlugin:
def __init__(self):
self.collected = []
def pytest_collection_modifyitems(self, items):
for item in items:
self.collected.append(item)
collection_plugin = CollectionPlugin()
pytest.main(["--setup-only", *sys.argv[1:]], plugins=[collection_plugin])
output = []
cwd = Path.cwd()
for item in collection_plugin.collected:
output.extend(process_fixtures(item))
with open("tests.json", "w") as f:
f.write(json.dumps(output))
|
pantsbuild/pants
|
pants-plugins/internal_plugins/test_lockfile_fixtures/collect_fixtures.py
|
collect_fixtures.py
|
py
| 2,655 |
python
|
en
|
code
| 2,896 |
github-code
|
6
|
38777260786
|
import pandas as pd
import os
import numpy as np
from math import floor
from sqlalchemy import create_engine, MetaData, Table
import tushare as ts
from utils import get_all_tdx_symbols
import click
import struct
"""
Read TDX (通达信) daily k-line and minute-line data files
"""
class TdxFileNotFoundException(Exception):
pass
class TdxReader:
def __init__(self, vipdoc_path):
self.vipdoc_path = vipdoc_path
self.engine = None
def get_kline_by_code(self, code, exchange):
fname = os.path.join(self.vipdoc_path, exchange)
fname = os.path.join(fname, 'lday')
fname = os.path.join(fname, '%s%s.day' % (exchange, code))
return self.parse_data_by_file(fname)
def get_mline_by_code(self, code, exchange):
fname = os.path.join(self.vipdoc_path, exchange)
fname = os.path.join(fname, 'minline')
fname = os.path.join(fname, '%s%s.lc1' % (exchange, code))
return self.parse_mdata_by_file(fname)
    def parse_data_by_file(self, fname):
        if not os.path.isfile(fname):
            raise TdxFileNotFoundException('no tdx kline data, please check path %s' % fname)
        with open(fname, 'rb') as f:
            content = f.read()
            return self.unpack_records('<iiiiifii', content)
    def parse_mdata_by_file(self, fname):
        if not os.path.isfile(fname):
            raise TdxFileNotFoundException("no tdx mline data, please check path %s" % fname)
        with open(fname, 'rb') as f:
            content = f.read()
            return self.unpack_records('<HHfffffIxxxx', content)
def unpack_records(self, format, data):
record_struct = struct.Struct(format)
return (record_struct.unpack_from(data, offset)
for offset in range(0, len(data), record_struct.size))
def get_df(self, code, exchange):
data = [self._df_convert(row) for row in self.get_kline_by_code(code, exchange)]
df = pd.DataFrame(data=data, columns=('date', 'open', 'high', 'low', 'close', 'amount', 'volume'))
df.index = pd.to_datetime(df.date)
return df[['open', 'high', 'low', 'close', 'volume']]
def get_mindf(self, code, exchange):
data = [self._mindf_convert(row) for row in self.get_mline_by_code(code, exchange)]
df = pd.DataFrame(data=data, columns=('datetime', 'open', 'high', 'low', 'close', 'amount', 'volume'))
try:
df.index = pd.to_datetime(df.datetime)
except ValueError as err:
print("ValueError: ", df.datetime)
raise err
return df[['open', 'high', 'low', 'close', 'amount', 'volume']]
def _df_convert(self, row):
t_date = str(row[0])
datestr = t_date[:4] + "-" + t_date[4:6] + "-" + t_date[6:]
new_row = (
datestr,
row[1] * 0.01, # * 0.01 * 1000 , zipline need 1000 times to original price
row[2] * 0.01,
row[3] * 0.01,
row[4] * 0.01,
row[5],
row[6]
)
return new_row
def _mindf_convert(self, row):
t_date = row[0]
year = floor(t_date / 2048) + 2004
month = floor((t_date % 2048) / 100)
day = (t_date % 2048) % 100
datestr = "%d-%02d-%02d" % (year, month, day)
t_minute = row[1]
hour = floor(t_minute / 60)
minute = t_minute % 60
timestr = "%02d:%02d:00" % (hour, minute)
datetimestr = "%s %s" % (datestr, timestr)
new_row = (
datetimestr,
row[2],
row[3],
row[4],
row[5],
row[6],
row[7]
)
return new_row
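    # Worked example of the packed encodings above (illustrative values, not
    # from the original source): a raw date of 22843 decodes as
    # year = 22843 // 2048 + 2004 = 2015, month = (22843 % 2048) // 100 = 3,
    # day = (22843 % 2048) % 100 = 15, i.e. "2015-03-15"; a raw minute count of
    # 570 decodes as hour = 570 // 60 = 9, minute = 570 % 60 = 30, i.e. "09:30:00".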
def to_sql(self, symbol, exchange):
table_name = exchange+symbol
table = Table(table_name, MetaData(bind=self.engine))
new = self.get_mindf(symbol, exchange)
if table.exists():
old = pd.read_sql_table(exchange+symbol, self.engine, index_col='datetime')
if new.index[-1] <= old.index[-1]:
return
else:
df_to_append = new[old.index[-1]:]
else:
df_to_append = new
df_to_append.to_sql(table_name, self.engine, if_exists='append')
def save_minute_line(self, sql_url):
self.engine = create_engine(sql_url)
tdx_symbol_list = get_all_tdx_symbols()
total = len(tdx_symbol_list)
i = 0
for symbol in tdx_symbol_list:
i += 1
click.echo("saving symbol %s%s (%d/%d)" %(symbol[1], symbol[0], i, total))
self.to_sql(symbol=symbol[0], exchange=symbol[1])
@click.command()
@click.argument('vipdoc', type=click.Path(exists=True))
@click.argument('sql_url', type=click.Path())
def main(vipdoc, sql_url):
click.echo('minute line saving...')
tdx_reader = TdxReader(vipdoc)
tdx_reader.save_minute_line("sqlite:///" + sql_url)
if __name__ == '__main__':
main()
#tdx_reader = TdxReader('c:\\new_zx_allin1\\vipdoc\\')
# try:
# #for row in tdx_reader.parse_data_by_file('/Volumes/more/data/vipdoc/sh/lday/sh600000.day'):
# # print(row)
# for row in tdx_reader.get_mline_by_code('600433', 'sh'):
# print(row)
# except TdxFileNotFoundException as e:
# pass
#
# print(tdx_reader.get_mindf('600433', 'sh'))
#sql_url = "sqlite:///lc1.db"
#tdx_reader.save_minute_line(sql_url=sql_url)
|
maxwell-lv/MyQuant
|
tdxreader.py
|
tdxreader.py
|
py
| 5,452 |
python
|
en
|
code
| 0 |
github-code
|
6
|
43272673113
|
import os
import cv2
import numpy as np
import torch
from PIL import Image
import torchvision
class SegDataset(object):
def __init__(self, root, transforms):
self.root = root
self.transforms = transforms
# load all image files, sorting them to
# ensure that they are aligned
# cut_dataset_at=10000
self.imgs = list(sorted(os.listdir(os.path.join(root, "images"))))#[:cut_dataset_at]
self.masks = list(sorted(os.listdir(os.path.join(root, "masks"))))#[:cut_dataset_at]
self.polys = list(sorted(os.listdir(os.path.join(root, "polygons"))))#[:cut_dataset_at]
def __getitem__(self, idx):
        # load images and masks
img_path = os.path.join(self.root, "images", self.imgs[idx])
mask_path = os.path.join(self.root, "masks", self.masks[idx])
poly_path = os.path.join(self.root, "polygons", self.polys[idx])
img = Image.open(img_path).convert("RGB")
img = np.array(img)
mask = Image.open(mask_path)
mask = np.array(mask)
# instances are encoded as different colors
obj_ids = np.unique(mask)
# first id is the background, so remove it
obj_ids = obj_ids[1:]
# split the color-encoded mask into a set of binary masks
masks = mask == obj_ids[:, None, None]
polys = np.load(poly_path)
# get bounding box coordinates for each mask
num_objs = len(obj_ids)
boxes = []
for i in range(num_objs):
pos = np.where(masks[i])
xmin = np.min(pos[1])
xmax = np.max(pos[1])
ymin = np.min(pos[0])
ymax = np.max(pos[0])
boxes.append([xmin, ymin, xmax, ymax])
# convert everything into a torch.Tensor
boxes = torch.as_tensor(boxes, dtype=torch.float32)
mask = np.zeros((img.shape[0], img.shape[1]))
cv2.drawContours(mask, [polys], -1, 1, 5)
model_input_size = (300, 300)
img = cv2.resize(img, model_input_size)
mask = cv2.resize(mask, model_input_size)
img = torchvision.transforms.ToTensor()(img)
mask = torch.tensor(np.expand_dims(mask, axis=0), dtype=torch.float)
# if self.transforms is not None:
# for transform in self.transforms:
# print(transform)
# img = transform(img)
# mask = transform(target)
return img, mask
def __len__(self):
return len(self.imgs)
|
v7labs/deeplabv3-edges
|
dataset.py
|
dataset.py
|
py
| 2,535 |
python
|
en
|
code
| 1 |
github-code
|
6
|
72355994109
|
#!/usr/bin/python3
"""Module that lists all State objects from the database hbtn_0e_6_usa"""
from sys import argv
from model_state import Base, State
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
if __name__ == '__main__':
engine = create_engine('mysql+mysqldb://{}:{}@localhost:3306/{}'
.format(argv[1], argv[2], argv[3]))
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
selected_state = session.query(State).filter(State.name == argv[4])
try:
print("{}".format(selected_state[0].id))
except IndexError:
print("Not found")
|
MrZooM001/alx-higher_level_programming
|
0x0F-python-object_relational_mapping/10-model_state_my_get.py
|
10-model_state_my_get.py
|
py
| 672 |
python
|
en
|
code
| 0 |
github-code
|
6
|
39964228620
|
import pymysql
from tkinter import *
from tkinter import messagebox
import sys
import datetime
u=sys.argv[1]
p=sys.argv[2]
class dropdown:
def __init__(self,appen,lis):
self.m = StringVar()
self.m.set("choose")
self.opt=OptionMenu(appen,self.m,*lis)
self.opt.grid(row=len(lis),column=1)
def place(self,p,q):
self.opt.place(x=p,y=q)
db = pymysql.connect("localhost",u,p,"DBMS")
cursor = db.cursor()
LEC=[]
STU=[]
courses=[]
venues=[]
t=['0{}:00:00'.format(x) for x in range(7,10)]
t+=['{}:00:00'.format(x) for x in range(10,19)]
Day=['Monday','Tuesday','Wednesday','Thursday','Friday']
def up():
global LEC
global STU
global courses
global venues
cursor.execute("Select * from Lecturer;")
LEC = cursor.fetchall()
cursor.execute("Select * from Student;")
STU = cursor.fetchall()
cursor.execute("Select CourseID from Courses order by CourseID ASC;")
courses = cursor.fetchall()
cursor.execute("Select Venue from Venues order by Venue ASC")
venues = list(map(lambda x: str(x).strip(",')("),cursor.fetchall()))
up()
cl={'CT':"blue",'CS-A':"yellow",'CS-B':"green",'CIS':"orange"}
def table():
ind = 0
top=Toplevel()
top.geometry("480x540")
top.resizable(height=False,width=False)
top.title("Schedule")
w=Canvas(top,bg="white",height=480,width=480)
w.pack()
for i in range(1,9):
w.create_line(i*60,0,i*60,480,fill="black")
for i in range(1,12):
w.create_line(0,i*40,480,i*40,fill="black")
for i in range(7,18):
Label(top,text=(str(i)+":00")).place(x=5,y=(i-7)*40+53)
for i,l in zip(range(7,15),venues):
Label(top,text=l).place(x=(i-7)*60+68,y=12)
def tbl():
nonlocal ind
cursor.execute("select Schedule.CourseID, Schedule.Program, Schedule.Venue, Schedule.StartTime, Schedule.StopTime, Courses.LecturerID from Schedule inner join Courses on Courses.CourseID = Schedule.CourseID where Day = '{}';".format(Day[ind]))
a=cursor.fetchall()
for i in range(len(a)):
st=str((datetime.datetime.min + a[i][3]).time())
sp=str((datetime.datetime.min + a[i][4]).time())
w.create_rectangle((venues.index(a[i][2])+1)*60,(t.index(st)+1)*40,(venues.index(a[i][2])+2)*60,(t.index(sp)+1)*40,fill=cl[a[i][1]])
y=((t.index(st)+1)*40 + (t.index(sp)+1)*40)/2 - 18
Label(w,text="{}\n{}\n{}".format(a[i][0],a[i][1],a[i][5])).place(x=(venues.index(a[i][2]))*60+64,y=y) #(t.index(st))*40+47)
tbl()
def prev_():
nonlocal ind
if ind >0:
ind-=1
for wid in w.winfo_children():
wid.destroy()
for i in w.find_all()[19:]:
w.delete(i)
tbl()
lab.config(text=Day[ind])
def nex_():
nonlocal ind
if ind <4:
ind+=1
for wid in w.winfo_children():
wid.destroy()
for i in w.find_all()[19:]:
w.delete(i)
tbl()
lab.config(text=Day[ind])
lab=Label(top,text=Day[ind])
lab.place(x=220,y=505)
prev=Button(top,text="prev",command = prev_)
nex = Button(top,text="next",command=nex_)
prev.place(x=130,y=500)
nex.place(x=302,y=500)
def Student():
i = 0
top=Toplevel()
top.title("Student")
top.geometry("300x280")
top.resizable(height=False,width=False)
l1 = Label(top,text="Matric:")
l2 = Label(top,text="FName:")
l3 = Label(top,text="Lname:")
l4 = Label(top,text="Program:")
l1.place(x=98,y=30)
l2.place(x=105,y=70)
l3.place(x=105,y=110)
l4.place(x=91,y=150)
d1 = Label(top,text=STU[i][1])
d2 = Label(top,text=STU[i][2])
d3 = Label(top,text=STU[i][3])
d4 = Label(top,text=STU[i][4])
d1.place(x=170,y=30)
d2.place(x=170,y=70)
d3.place(x=170,y=110)
d4.place(x=170,y=150)
def pr():
nonlocal i
if i > 0:
i -= 1
d1.configure(text=STU[i][1])
d2.configure(text=STU[i][2])
d3.configure(text=STU[i][3])
d4.configure(text=STU[i][4])
def ne():
nonlocal i
if i < len(STU)-1:
i += 1
d1.configure(text=STU[i][1])
d2.configure(text=STU[i][2])
d3.configure(text=STU[i][3])
d4.configure(text=STU[i][4])
def new():
nonlocal i
def upd(a,b,c,ap):
a=a.strip()
b=b.strip()
sql="insert into Student value (NULL,NULL,'{}','{}','{}');".format(a,b,c)
try:
cursor.execute(sql)
cursor.execute("call GenMatric();")
db.commit()
messagebox.showinfo("Confirmation","Student Added Successfully.")
up()
ap.destroy()
except:
db.rollback()
messagebox.showerror("Value Error","Could not add Student")
appen = Toplevel()
appen.title("New Student")
appen.geometry("300x230")
appen.resizable(height=False,width=False)
l1 = Label(appen,text="id:")
l1.place(x=50,y=20)
l2 = Label(appen,text ="FName:")
l2.place(x=29,y=60)
l3 = Label(appen,text ="LName:")
l3.place(x=29,y=100)
l4 = Label(appen,text ="Program:")
l4.place(x=15,y=140)
id_ = Label(appen,text=str(len(STU)+1))
id_.place(x=100,y=20)
fname = Entry(appen,bd=5)
fname.place(x=100,y=60)
lname = Entry(appen,bd=5)
lname.place(x=100,y=100)
prog=dropdown(appen,['CT','CIS','CS-A','CS-B'])
prog.place(100,135)
comit=Button(appen,text="register",command=lambda: upd(fname.get(),lname.get(),prog.m.get(),appen))
comit.place(x=100,y=180)
prev = Button(top,text="Prev",command = pr)
prev.place(x=50,y=190)
next_ = Button(top,text="Next",command = ne)
next_.place(x =190,y=190)
new = Button(top,text="+",command= new)
new.place(x=130,y=230)
def Lecturer():
i = 0
top=Toplevel()
top.title("Lecturer")
top.geometry("300x230")
top.resizable(height=False,width=False)
l1 = Label(top,text="LecturerID:")
l2 = Label(top,text="FName:")
l3 = Label(top,text="Lname:")
l1.place(x=70,y=30)
l2.place(x=105,y=70)
l3.place(x=105,y=110)
d1 = Label(top,text=LEC[i][1])
d2 = Label(top,text=LEC[i][2])
d3 = Label(top,text=LEC[i][3])
d1.place(x=170,y=30)
d2.place(x=170,y=70)
d3.place(x=170,y=110)
def pr():
nonlocal i
if i > 0:
i -= 1
d1.configure(text=LEC[i][1])
d2.configure(text=LEC[i][2])
d3.configure(text=LEC[i][3])
def ne():
nonlocal i
if i < len(LEC)-1:
i += 1
d1.configure(text=LEC[i][1])
d2.configure(text=LEC[i][2])
d3.configure(text=LEC[i][3])
def new():
nonlocal i
def upd(a,b,ap):
a=a.strip()
b=b.strip()
sql="insert into Lecturer value (NULL,NULL,'{}','{}');".format(a,b)
try:
cursor.execute(sql)
cursor.execute("call GenLecID();")
db.commit()
messagebox.showinfo("Confirmation","Lecturer Added Successfully.")
up()
ap.destroy()
except:
db.rollback()
messagebox.showerror("Value Error","Could not add Lecturer")
appen = Toplevel()
appen.title("New Lecturer")
appen.geometry("300x210")
appen.resizable(height=False,width=False)
l1 = Label(appen,text="id:")
l1.place(x=50,y=20)
l2 = Label(appen,text ="FName:")
l2.place(x=29,y=60)
l3 = Label(appen,text ="LName:")
l3.place(x=29,y=100)
id_ = Label(appen,text=str(len(LEC)+1))
id_.place(x=100,y=20)
fname = Entry(appen,bd=5)
fname.place(x=100,y=60)
lname = Entry(appen,bd=5)
lname.place(x=100,y=100)
comit=Button(appen,text="register",command=lambda: upd(fname.get(),lname.get(),appen))
comit.place(x=100,y=140)
prev = Button(top,text="Prev",command = pr)
prev.place(x =50,y=150 )
next_ = Button(top,text="Next",command = ne)
next_.place(x =180,y=150)
new = Button(top,text="+",command= new)
new.place(x=130,y=190)
def schedule():
def course_handler(a,b,c,d,e,f,top):
try:
cursor.execute("select CourseID from CourseTaken where Program = '{}';".format(b))
pro_course = list(map(lambda x: str(x).strip(",')("),cursor.fetchall()))
cursor.execute("select Size from Venues order by Venue ASC;")
v = list(map(lambda x: str(x).strip(",')("),cursor.fetchall()))
cursor.execute("select * from Student where Program = '{}';".format(b))
classSize = len(cursor.fetchall())
cursor.execute("select Program, Day, CourseID, StartTime, StopTime from Schedule where CourseID = '{}' and Program = '{}';".format(a,b))
dur = sum(list(map(lambda o:int(str(o[4])[:2].strip(":("))-int(str(o[3])[:2].strip(":(,)")),cursor.fetchall())))
cursor.execute("select Units from Courses where CourseID = '{}';".format(a))
cred = int(cursor.fetchall()[0][0])
cursor.execute("select Venue, Day, StartTime, StopTime from Schedule;")
sch = cursor.fetchall()
cursor.execute("select Program, Day, StartTime, StopTime from Schedule;")
clas = cursor.fetchall()
cursor.execute("select LecturerID, Day, StartTime, StopTime from Courses inner join Schedule on Courses.CourseID = Schedule.CourseID where Courses.CourseID = '{}';".format(a))
lect = cursor.fetchall()
except:
messagebox.showerror("Connection Error","Could Not connect to database")
return
def timer(a):
return datetime.timedelta(hours=datetime.datetime.strptime(a,'%H:%M:%S').hour)
if f<=e:
messagebox.showerror("Schedule Error","Stop Time cannot be earlier than Start Time")
elif a not in pro_course:
messagebox.showerror("Schedule Error","{} do not offer {}".format(b,a))
elif int(v[list(map(lambda x: str(x).strip(",')("),venues)).index(c)])<classSize:
messagebox.showerror("Schedule Error","Venue is too small")
elif cred < dur+int(datetime.datetime.strptime(f,'%H:%M:%S').hour-datetime.datetime.strptime(e,'%H:%M:%S').hour):
messagebox.showerror("Schedule Error", "Course Overload")
elif (str(c),str(d),timer(e),timer(f)) in sch:
messagebox.showerror("Schedule Error","class already holding at venue")
elif (str(b),str(d),timer(e),timer(f)) in clas:
messagebox.showerror("Schedule Error","{} already have a class then".format(b))
elif (str(lect[0][0]),str(d),timer(e),timer(f)) in lect:
messagebox.showerror("Schedule Error","{} is already teaching a class then".format(lect[0][0]))
else:
try:
cursor.execute("INSERT into Schedule value ('{}','{}','{}','{}','{}','{}');".format(a,b,c,d,e,f))
db.commit()
top.destroy()
except:
db.rollback()
messagebox.showerror("Connection Error","Could not connect to database")
top=Toplevel()
top.title("Scheduler")
top.geometry("360x320")
top.resizable(height=False,width=False)
l1 = Label(top, text = 'Course:')
l2 = Label(top,text = 'Program:')
l3 = Label(top, text = 'Venue:')
l4 = Label(top, text = 'Day:')
l5 = Label(top, text = 'Start time:')
l6 = Label(top, text = 'Stop time:')
l1.place(x=100,y=30)
l2.place(x=93,y=70)
l3.place(x=107,y=110)
l4.place(x=121,y=150)
l5.place(x=72,y=190)
l6.place(x=79,y=230)
e1 = dropdown(top,list(map(lambda x: str(x).strip(",')("),courses)))
e1.place(170,25)
e2 = dropdown(top,['CT','CIS','CS-A','CS-B'])
e2.place(170,65)
e3 = dropdown(top,list(map(lambda x: str(x).strip(",')("),venues)))
e3.place(170,105)
e4 = dropdown(top,['Monday','Tuesday','Wednesday','Thursday','Friday'])
e4.place(170,145)
e5 = dropdown(top,t[:len(t)-1])
e5.place(170,185)
e6 = dropdown(top,t[1:])
e6.place(170,225)
add_course = Button(top,text="ADD COURSE",command=lambda:course_handler(e1.m.get(),e2.m.get(),e3.m.get(),e4.m.get(),e5.m.get(),e6.m.get(),top))
add_course.place(x=140,y=275)
root = Tk()
root.title("DBMS")
root.geometry("500x500")
root.resizable(height=False,width=False)
w=Canvas(root,bg="white",height=500,width=500)
w.pack()
stu = Button(root,text = "Show Students",command = Student)
lec = Button(root,text = "Show Lecturers",command = Lecturer)
sch = Button(root,text = "Time Table",command=table)
form = Button(root, text = "Schedule Class", command = schedule)
stu.place(x=186.5,y=80)
lec.place(x=186,y=180)
sch.place(x=200,y=280)
form.place(x=186,y=380)
root.mainloop()
db.close()
|
2HgO/CLASS-SCHEDULER
|
Scheduler.py
|
Scheduler.py
|
py
| 13,116 |
python
|
en
|
code
| 1 |
github-code
|
6
|
39341038944
|
class Point2D:
def __init__(self, x, y):
self.x = x
self.y = y
class Point3D:
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
class Line:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
@property
def distance(self):
if type(self.p1) == type(self.p2):
if type(self.p1) == Point2D:
return(round(((self.p2.x-self.p1.x)**2+(self.p2.y-self.p1.y)**2)**.5,3))
if type(self.p1) == Point3D:
return(round(((self.p2.x-self.p1.x)**2+(self.p2.y-self.p1.y)**2+(self.p2.z-self.p1.z)**2)**.5,3))
return("Error - Points of different dimensions")
@property
def midpoint(self):
if type(self.p1) == type(self.p2):
if type(self.p1) == Point2D:
return((round((self.p1.x+self.p2.x)/2, 2), round((self.p1.y+self.p2.y)/2,2)))
if type(self.p1) == Point3D:
return((round((self.p1.x+self.p2.x)/2, 2), round((self.p1.y+self.p2.y)/2, 2), round((self.p1.z+self.p2.z)/2, 2)))
return("Error - Points of different dimensions")
|
SchulerHunter/CMPSC-132
|
Quiz 1/Coding_Q2.py
|
Coding_Q2.py
|
py
| 1,144 |
python
|
en
|
code
| 0 |
github-code
|
6
|
10865691548
|
from django.contrib import admin
from django.db import models
from django.db.models import Q
from django.utils.translation import gettext_lazy as _
from ...settings import ADMIN_MEDIA_JS
from .actions import save_all_theses_to_xls
from .models import Thesis
# Copied from https://gist.github.com/rafen/eff7adae38903eee76600cff40b8b659,
# also present in theses admin and jobs admin
class ExtendedActionsMixin(object):
# actions that can be executed with no items selected on the admin change list.
# The filtered queryset displayed to the user will be used instead
extended_actions = []
def changelist_view(self, request, extra_context=None):
        # if an extended action is called and there's no checkbox selected, select one with
# invalid id, to get an empty queryset
if "action" in request.POST and request.POST["action"] in self.extended_actions:
if not request.POST.getlist(admin.ACTION_CHECKBOX_NAME):
post = request.POST.copy()
post.update({admin.ACTION_CHECKBOX_NAME: 0})
request._set_post(post)
return super(ExtendedActionsMixin, self).changelist_view(request, extra_context)
def get_changelist_instance(self, request):
"""
Returns a simple ChangeList view instance of the current ModelView.
(It's a simple instance since we don't populate the actions and list filter
as expected since those are not used by this class)
"""
list_display = self.get_list_display(request)
list_display_links = self.get_list_display_links(request, list_display)
list_filter = self.get_list_filter(request)
search_fields = self.get_search_fields(request)
list_select_related = self.get_list_select_related(request)
ChangeList = self.get_changelist(request)
return ChangeList(
request,
self.model,
list_display,
list_display_links,
list_filter,
self.date_hierarchy,
search_fields,
list_select_related,
self.list_per_page,
self.list_max_show_all,
self.list_editable,
self,
self.sortable_by,
)
def get_filtered_queryset(self, request):
"""
Returns a queryset filtered by the URLs parameters
"""
cl = self.get_changelist_instance(request)
return cl.get_queryset(request)
class ThesisListFilter(admin.SimpleListFilter):
title = _("empty thesis title")
parameter_name = "have_title"
def lookups(self, request, model_admin):
return (
("yes", _("Yes")),
("no", _("No")),
)
def queryset(self, request, queryset):
if self.value() == "no":
return queryset.filter(title__isnull=False).exclude(title="")
if self.value() == "yes":
return queryset.filter(Q(title__isnull=True) | Q(title__exact=""))
@admin.register(Thesis)
class ThesisAdmin(admin.ModelAdmin):
list_display = ("get_author", "title", "show_year", "type")
list_filter = ("type", ThesisListFilter)
search_fields = (
"title",
"alumnus__last_name",
"alumnus__first_name",
"date_start",
"date_stop",
"date_of_defence",
)
ordering = ("alumnus__username",)
filter_horizontal = ("advisor",)
readonly_fields = (
"date_created",
"date_updated",
"last_updated_by",
"slug",
)
actions = (
"export_selected_degrees_to_excel",
"export_all_degrees_to_excel",
"export_filtered_degrees_to_excel",
)
extended_actions = (
"export_all_degrees_to_excel",
"export_filtered_degrees_to_excel",
)
max_num = 2
fieldsets = [
(
"Thesis Information",
{"fields": ["alumnus", "type", "date_start", "date_stop"]},
),
(
"Thesis Information",
{
"fields": [
"title",
"date_of_defence",
"url",
"dissertation_nr",
"slug",
"in_library",
]
},
),
("Thesis Advisor ", {"fields": ["advisor"]}),
("Full Text and Cover Photo", {"fields": ["pdf", "photo"]}),
(
"Extra information",
{
"classes": ["collapse"],
"fields": [
"comments",
"date_created",
"date_updated",
"last_updated_by",
],
},
),
]
class Media:
js = ADMIN_MEDIA_JS
css = {"all": ("css/admin_extra.css",)}
def save_model(self, request, obj, form, change):
obj.last_updated_by = request.user
obj.save()
# def changelist_view(self, request, extra_context=None):
# """ Hack the default changelist_view to allow action "export_all_degrees_to_excel"
# to run without selecting any objects """
# if "action" in request.POST and request.POST["action"] == "export_all_degrees_to_excel":
# if not request.POST.getlist(admin.ACTION_CHECKBOX_NAME):
# post = request.POST.copy()
# for u in Thesis.objects.all():
# post.update({admin.ACTION_CHECKBOX_NAME: str(u.id)})
# request._set_post(post)
# return super(ThesisAdmin, self).changelist_view(request, extra_context)
def get_queryset(self, request):
"""This function defines how to sort on alumnus column in the list_display
http://stackoverflow.com/a/29083623"""
qs = super(ThesisAdmin, self).get_queryset(request)
qs = qs.annotate()
# TODO: this does not take into account the type of the Thesis. Also, when
# filtering on type = "PhD" ordering of the Theses could be done on the MSc Thesis
qs = qs.annotate(
sort_author=models.Count("alumnus__last_name", distinct=True)
).annotate(
sort_year=models.Count("alumnus__theses__date_of_defence", distinct=True)
)
return qs
# def formfield_for_manytomany(self, db_field, request, **kwargs):
# try: # Breaks for add thesis
# current_thesis = Thesis.objects.get(pk=request.resolver_match.args[0])
# if db_field.name == "advisor":
# kwargs["queryset"] = Alumnus.objects.exclude(username=current_thesis.alumnus.username)
# return super(ThesisAdmin, self).formfield_for_manytomany(db_field, request, **kwargs)
# except IndexError as e:
# if str(e) == "tuple index out of range":
# pass
def get_author(self, obj):
"""We could use author instead of get_alumnus in list_display"""
return obj.alumnus.full_name
get_author.short_description = "Author"
get_author.admin_order_field = "sort_author"
def show_year(self, obj):
if obj.date_of_defence:
return obj.date_of_defence.strftime("%Y")
elif obj.date_stop:
return obj.date_stop.strftime("%Y")
return None
show_year.short_description = "Year"
show_year.admin_order_field = "sort_year"
def export_selected_degrees_to_excel(self, request, queryset):
return save_all_theses_to_xls(request, queryset)
export_selected_degrees_to_excel.short_description = (
"Export selected Theses to Excel"
)
def export_all_degrees_to_excel(self, request, queryset):
return save_all_theses_to_xls(request, None)
export_all_degrees_to_excel.short_description = "Export all Theses to Excel"
def export_filtered_degrees_to_excel(self, request, queryset):
queryset = self.get_filtered_queryset(request)
return save_all_theses_to_xls(request, queryset)
export_filtered_degrees_to_excel.short_description = (
"Export filtered list of Theses to Excel"
)
|
tlrh314/UvA_API_Alumni
|
apiweb/apps/research/admin.py
|
admin.py
|
py
| 8,102 |
python
|
en
|
code
| 2 |
github-code
|
6
|
27466582699
|
"""
When students are numbered 0 through N, there are N+1 teams in total.
The teacher can choose between two operations:
1. "Merge teams": merge two teams into one.
2. "Check same team": check whether two given students belong to the same team.
Write a program that performs the teacher's M operations and prints the results.
The first line contains N and M (1 <= N, M <= 100,000).
Each of the next M lines is given in the form "0 a b" or "1 a b".
"0 a b" merges the teams that students a and b belong to,
and "1 a b" checks whether a and b belong to the same team.
a and b are positive integers.
Check same team
[INPUT]
7 8
0 1 3
1 1 7
0 7 6
1 7 1
0 3 7
0 4 2
0 1 1
1 1 1
[OUTPUT]
NO
NO
YES
"""
import sys
input = sys.stdin.readline
n,m = map(int, input().split())
par = [0]*(n+1)
for i in range(1, n+1):
par[i] = i
def find_union(p,x):
if x != p[x]:
p[x] = find_union(p,p[x])
return p[x]
def union(p,a,b):
a,b = find_union(p,a), find_union(p,b)
if a<b:
p[b] = a
else:
p[a] = b
result = []
for _ in range(m):
c,a,b = map(int, input().split())
if c == 0:
union(par,a,b)
if c == 1:
if find_union(par, a) == find_union(par, b):
result.append("YES")
else:
result.append("NO")
print(*result)
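# Illustrative note (not from the original source): find_union also compresses
# paths. After union(par, 1, 2), union(par, 3, 4) and union(par, 2, 4) the
# chain is 4 -> 3 -> 1; the first find_union(par, 4) rewrites par[4] to 1
# directly, so repeated queries stay near constant time.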
|
20190511/GraphAlgorithm
|
문제풀이/Q2-팀결성.py
|
Q2-팀결성.py
|
py
| 1,442 |
python
|
ko
|
code
| 1 |
github-code
|
6
|
8413409544
|
import numpy as np
from itertools import product
from vgc_project.maze import Maze
def test_basic_maze_properties():
pw=.94
ps=.6
m = Maze(
tile_array=(
".j.",
"x#3"
),
absorbing_features=("j",),
wall_features=("#","3"),
default_features=(".",),
initial_features=("x",),
step_cost=-1,
wall_bump_cost=0,
wall_block_prob=pw,
success_prob=ps,
discount_rate=1.0,
include_action_effect=True,
include_wall_effect=True,
include_terminal_state_effect=True,
wall_bias=0.
)
# all locations are part of state space
assert list(m.state_list) == list(product(range(3), range(2)))
# fixed action ordering
assert list(m.action_list) == [(1, 0), (-1, 0), (0, 0), (0, 1), (0, -1)]
right, left, wait, up, down = m.action_list
# all non-terminal state-actions should have the step cost
sa_rf = (m.reward_matrix*m.transition_matrix).sum(-1)
non_term_sa_rf = sa_rf[m.nonterminal_state_vec]
assert (non_term_sa_rf == m.step_cost).all()
# transition function
tf = m.transition_matrix
nss = tf.shape[0]
aa = m.action_list
ss = m.state_list
# wait action
assert (tf[np.arange(nss), 2, np.arange(nss)] == 1).all()
# going off edge of grid leads you to stay in place
assert tf[ss.index((0, 0)), aa.index(left), ss.index((0, 0))] == 1
assert tf[ss.index((2, 0)), aa.index(right), ss.index((2, 0))] == 1
    # dynamics of going from nonwall to nonwall
assert tf[ss.index((0, 0)), aa.index(up), ss.index((0, 1))] == ps
# exiting a wall into a non-wall or wall has the same probability
# namely, we ignore the wall dynamics
assert tf[ss.index((1, 0)), aa.index(left), ss.index((0, 0))] == ps
assert tf[ss.index((1, 0)), aa.index(right), ss.index((2, 0))] == ps
# dynamics of crashing into a wall
assert np.isclose(
tf[ss.index((0, 0)), aa.index(right), ss.index((1, 0))],
(ps*(1 - pw))/(ps*(1 - pw) + pw*(1 - ps))
)
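    # Derivation sketch for the wall-crash probability above (our reading of the
    # model, not stated in the original file): an attempted move succeeds with
    # probability ps and the wall blocks with probability pw; conditioning on the
    # two consistent outcomes (move succeeds and wall does not block, versus wall
    # blocks and move fails) gives ps*(1-pw) / (ps*(1-pw) + pw*(1-ps)).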
# terminal state leads to itself with probability 1
# and being in it always gives reward 0
assert not m.nonterminal_state_vec[ss.index((1, 1))]
assert m.nonterminal_state_vec.sum() == 5
assert (tf[ss.index((1, 1)), :, ss.index((1, 1))] == 1).all()
assert (sa_rf[ss.index((1, 1))] == 0).all()
|
markkho/value-guided-construal
|
vgc_project/vgc_project/tests/test_maze.py
|
test_maze.py
|
py
| 2,403 |
python
|
en
|
code
| 20 |
github-code
|
6
|
40275219447
|
# %%
import pandas as pd
import plotly.io as pio
import plotly.express as px
import plotly
# %%
adani_df = pd.read_csv(r'Data\Scatterplot\ADANIENT_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
appolo_df = pd.read_csv(r'Data\Scatterplot\APOLLOHOSP_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
asian_df = pd.read_csv(r'Data\Scatterplot\ASIANPAINT_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
airtel_df = pd.read_csv(r'Data\Scatterplot\BHARTIARTL_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
bajaj_df = pd.read_csv(r'Data\Scatterplot\BAJFINANCE_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
divis_df = pd.read_csv(r'Data\Scatterplot\DIVISLAB_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
drreddy_df = pd.read_csv(r'Data\Scatterplot\DRREDDY_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
hind_df = pd.read_csv(r'Data\Scatterplot\HINDALCO_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
hdfc_df = pd.read_csv(r'Data\Scatterplot\HDFC_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
hul_df = pd.read_csv(r'Data\Scatterplot\HINDUNILVR_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
infy_df = pd.read_csv(r'Data\Scatterplot\INFY_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
itc_df = pd.read_csv(r'Data\Scatterplot\ITC_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
lt_df = pd.read_csv(r'Data\Scatterplot\LT_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
mm_df = pd.read_csv(r'Data\Scatterplot\MM_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
nestle_df = pd.read_csv(r'Data\Scatterplot\NESTLEIND_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
ongc_df = pd.read_csv(r'Data\Scatterplot\ONGC_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
power_df = pd.read_csv(r'Data\Scatterplot\POWERGRID_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
rel_df = pd.read_csv(r'Data\Scatterplot\RELIANCE_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
sbi_df = pd.read_csv(r'Data\Scatterplot\SBIN_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
sun_df = pd.read_csv(r'Data\Scatterplot\SUNPHARMA_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
tatam_df = pd.read_csv(r'Data\Scatterplot\TATAMOTORS_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
tcs_df = pd.read_csv(r'Data\Scatterplot\TCS_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
ulttech_df = pd.read_csv(r'Data\Scatterplot\ULTRACEMCO_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
upl_df = pd.read_csv(r'Data\Scatterplot\UPL_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
wipro_df = pd.read_csv(r'Data\Scatterplot\WIPRO_day_data_processed.csv', parse_dates=['date'], index_col=['date'])
# %%
adani_df['company'] = 'Adani Enterprises'
appolo_df['company'] = 'Apollo Hospitals'
asian_df['company'] = 'Asian Paints'
airtel_df['company'] = 'Bharti Airtel'
bajaj_df['company'] = 'Bajaj Finance'
drreddy_df['company'] = "Dr. Reddy's Laboratories"
hdfc_df['company'] = "HDFC Bank"
infy_df['company'] = 'Infosys'
itc_df['company'] = 'ITC'
lt_df['company'] = 'Larsen & Toubro'
mm_df['company'] = 'Mahindra & Mahindra'
nestle_df['company'] = 'Nestle India'
ongc_df['company'] = 'Oil and Natural Gas Corporation'
rel_df['company'] = 'Reliance Industries'
sbi_df['company'] = 'State Bank of India'
divis_df['company'] = "Divi's Laboratories"
hind_df['company'] = 'Hindalco Industries'
hul_df['company'] = 'Hindustan Unilever'
power_df['company'] = 'Power Grid Corporation of India'
sun_df['company'] = 'Sun Pharmaceutical'
tatam_df['company'] = 'Tata Motors'
tcs_df['company'] = 'Tata Consultancy Services'
ulttech_df['company'] = 'UltraTech Cement'
upl_df['company'] = 'United Phosphorus Limited'
wipro_df['company'] = 'Wipro'
# %%
df_stocks = pd.concat([adani_df, appolo_df, asian_df, airtel_df, bajaj_df, divis_df, drreddy_df, infy_df, hind_df, hdfc_df,
hul_df, itc_df, lt_df, mm_df, nestle_df, ongc_df, power_df, sbi_df, sun_df, rel_df, tatam_df, tcs_df, ulttech_df, upl_df, wipro_df], axis=0)
# %%
df = []
for company in df_stocks['company'].unique():
company_df = df_stocks[['volume', 'close']][df_stocks['company'] == company]
company_df[f'{company}_dollar_volume'] = company_df['volume'] * company_df['close']
company_df = company_df[[f'{company}_dollar_volume']]
df.append(company_df)
df = pd.concat(df, axis = 1)
# %%
company_df = df_stocks[['volume', 'close', 'company']]
company_df['dollar_volume'] = company_df['volume'] * company_df['close']
# %%
d= []
for company in df_stocks['company'].unique():
monthly_volume = pd.DataFrame()
monthly_dv = company_df['dollar_volume'][company_df['company']==company].resample('M').sum()
monthly_v = company_df['volume'][company_df['company']==company].resample('M').sum()
monthly_close = company_df['close'][company_df['company']==company].resample('M').mean()
monthly_volume['dollar_volume'] = monthly_dv
monthly_volume['volume'] = monthly_v
monthly_volume['close'] = monthly_close
monthly_volume['date'] = monthly_dv.index
monthly_volume['company'] = company
d.append(monthly_volume)
d = pd.concat(d)
d['company'].unique()
# %%
sectors = pd.read_csv(r'Data\sectors.csv')
# %%
s = []
for c in d['company']:
s.append(sectors['sector'][sectors['company']==c])
s = pd.concat(s)
d['sector'] = list(s)
# %%
fig = px.scatter(d, x="close", y="volume", animation_frame="date", animation_group="company", template='plotly_white',
size='dollar_volume',color="sector", hover_name="company", size_max=60, log_y=True, log_x=True, range_x=[60,21000], range_y=[250000,5994900000])
fig.update_layout(
title='Sectorwise Volume Data',
title_x=0.44,
yaxis_title='Volume',
xaxis_title='Price',
height=600,
# width=1200,
)
x_avg = d['close'].mean()
y_avg = d['volume'].mean()
fig.add_vline(x=x_avg, line_width=1, opacity=0.9)
fig.add_hline(y=y_avg, line_width=1, opacity=1)
fig.add_annotation(dict(font=dict(color="black",size=14),
x=0, y=-0.14,#data['score'].min()-0.2, y=data['wgt'].min()-0.2,
text="Low Volume - Low Price",
xref='paper',
yref='paper',
showarrow=False))
fig.add_annotation(dict(font=dict(color="black",size=14),
x=1, y=-0.14,#x=data['score'].max(), y=data['wgt'].min(),
text="Low Volume - High Price",
xref='paper',
yref='paper',
showarrow=False))
fig.add_annotation(dict(font=dict(color="black",size=14),
x=0, y=1.07, #x=data['score'].min(), y=data['wgt'].max(),
text="High Volume - Low Price",
xref='paper',
yref='paper',
showarrow=False))
fig.add_annotation(dict(font=dict(color="black",size=14),
x=1, y=1.07, #x=data['score'].max(), y=data['wgt'].max(),
text="High Volume - High Price",
xref='paper',
yref='paper',
showarrow=False))
fig.show()
plotly.offline.plot(fig, filename='scatterplot.html')
# %%
|
mathewjames/covid-impact-on-indian-stock-market
|
scatterplot.py
|
scatterplot.py
|
py
| 7,537 |
python
|
en
|
code
| 0 |
github-code
|
6
|
30469295270
|
# Integer multiplication using recursion
# Author: Prashanth Palaniappan
"""
Description:
This is an algorithm that performs multiplication of 2 integers of size n using recursion.
Solution:
The algorithm recursively divides the input numbers by half, until we are left with single digits. This is the base
case for recursion. Once we hit the base case, we perform regular multiplication of the single digits
"""
def multiply(x, y, n):
"""
:param x: input number 1
:param y: input number 2
:param n: length of the input numbers
:return: multiplied result of the 2 input numbers
a = first half of x
b = second half of x
c = first half of y
d = second half of y
x * y = ((10^n/2 * a) + b) * ((10^n/2 * c) + d)
= (10^n * ac) + (10^n/2 * (ad + bc)) + bd
"""
if n == 1:
return x * y
a = x // pow(10, n//2)
b = x - (a * pow(10, n//2))
c = y // pow(10, n//2)
d = y - (c * pow(10, n//2))
ac = multiply(a, c, len(str(a)))
bd = multiply(b, d, len(str(b)))
bc_plus_ad = multiply(a + b, c + d, len(str(a + b))) - ac - bd
mul_result = (pow(10, n//2) * pow(10, n//2) * ac) + (pow(10, n//2) * bc_plus_ad) + bd
return mul_result
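# Worked example of the recurrence above (illustrative, not from the original
# source): for x = 12, y = 34, n = 2 we get a = 1, b = 2, c = 3, d = 4, hence
# ac = 3, bd = 8 and bc_plus_ad = (1 + 2) * (3 + 4) - 3 - 8 = 10, giving
# 10^2 * 3 + 10^1 * 10 + 8 = 408 = 12 * 34.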
# Get the input numbers from the user
x = int(input('First number: '))
y = int(input('Second number: '))
# Raise exception if input integers are not of equal length
if len(str(x)) != len(str(y)):
raise Exception('Please enter 2 integers of equal length')
# Get the size of input
n = len(str(x))
# Compute and print the result
result = int(multiply(x, y, n))
print(result)
|
prashpal/algorithms-and-datastructures
|
python/numbers/integer_multiplication.py
|
integer_multiplication.py
|
py
| 1,606 |
python
|
en
|
code
| 0 |
github-code
|
6
|
69960819069
|
print("Advent of Code Day 1 Exercise 2")
# Set local variables
y = int(0)
calories = int(0)
count = int(0)
calorielist = []
f = open("/Users/pauldobe/AOC_22/input_file_day_1", "r")
for x in f:
if x != "\n":
y = y + int(x)
else:
calorielist.append(y)
y = 0
calorielist.sort(reverse = True)
calories = calorielist[0] + calorielist[1] + calorielist[2]
print(calories)
f.close()
|
pdobek/AOC_22
|
AOC_1_2.py
|
AOC_1_2.py
|
py
| 415 |
python
|
en
|
code
| 0 |
github-code
|
6
|
9312776182
|
class Solution:
def findDuplicate(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
"""
Complexity: O(n)
Space: O(1)
Think of a linked list with a cycle in it somewhere.
(142. Linked List Cycle II)
Say fast pointer goes twice as fast as the slow pointer, and when they
meet at point A, they both must be within the cycle.
        Now consider this: how much further has the fast pointer gone than the
        slow pointer? Exactly as far as the slow pointer itself has gone before
        fast and slow meet, and that distance is some multiple of the loop length.
        (Proof: the time of travel "t" is the same for fast and slow,
        Dist(fast) = v(fast) * t = 2 * v(slow) * t, so Dist(fast) - Dist(slow)
        = 2 * v(slow) * t - v(slow) * t = v(slow) * t = Dist(slow)).
        This means the distance from the starting point to point A must be a
        multiple of the loop length.
        So if we start another pointer at the starting point, moving at the same
        speed as the slow pointer, then after both walk that multiple of the loop
        length **they will be at point A at the same time**, so they must have
        entered the loop at the same point. That's why the point where they first
        meet is the entry point of the loop.
"""
if nums == []:
return -1
slow = nums[0]
fast = nums[nums[0]]
# find the point A they meet within the cycle
while slow != fast:
slow = nums[slow]
fast = nums[nums[fast]]
fast = 0
# find the entry point
while fast != slow:
fast = nums[fast]
slow = nums[slow]
return slow
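# Illustrative trace (not part of the original file): nums = [1, 3, 4, 2, 2]
# encodes the sequence 0 -> 1 -> 3 -> 2 -> 4 -> 2 -> ..., whose cycle entry,
# and hence the duplicate, is 2:
# assert Solution().findDuplicate([1, 3, 4, 2, 2]) == 2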
|
acnokego/LeetCode
|
287_find_duplicate_num/two_ptr.py
|
two_ptr.py
|
py
| 1,775 |
python
|
en
|
code
| 0 |
github-code
|
6
|
23015453145
|
# Write a Python program to reverse a tuple?
# x=('p','y','t','h','o','n')
# y=print(reversed(x))
x = (2, 4, 6)
result = reversed(x)
result = tuple(result)
print(result)
|
JACK07770777/Python-Assignments
|
Module 3/Q26.py
|
Q26.py
|
py
| 181 |
python
|
en
|
code
| 0 |
github-code
|
6
|
70722602427
|
import requests
from bs4 import BeautifulSoup
from .componentParser import ComponentParser
from .utils import isRelativePostDate, getRelativePostDate
class BlogPost:
errorCount = 0
def __init__(self, url, isDevMode=False):
        # developer convenience
self.isDevMode = isDevMode
# init
self.url = url
self.postInframeUrl = ''
self.postEditorVersion = None
self.postLogNum = None
self.postDate = None
self.postInframeSoup = None
# init check
if self.isForeignUrl():
print("[INIT ERROR] URL이 잘못되었습니다. 프로그램을 종료합니다.")
exit(-1)
# ============================================================================================
    # print helper for development convenience
def printDevMessage(self, message):
if self.isDevMode:
print("[DEV MODE] " + message, end='\n')
    # check whether the user-supplied URL is valid
def isForeignUrl(self):
self.printDevMessage("isForeignUrl execution")
if 'blog.naver.com' in self.url:
return False
else:
return True
# ============================================================================================
def postSetup(self):
try:
self.printDevMessage("== postSetup execution == ")
self.postInframeUrl = self.getPostInframeUrl()
self.postInframeSoup = self.getPostInframeSoup()
self.postEditorVersion = self.getPostEditorVersion()
self.postDate = self.getPostDate()
self.printDevMessage("== postSetup is clear == ")
            # only folder-creation checks happen here; once done, hand off to the run function
except Exception as e:
print(e)
def getPostInframeUrl(self):
self.printDevMessage("== getPostInframeUrl 실행 ==")
originHtml = requests.get(self.url).text
originSoup = BeautifulSoup(originHtml, features="html.parser")
for link in originSoup.select('iframe#mainFrame'):
postInframeUrl = "http://blog.naver.com" + link.get('src')
self.printDevMessage(f'return is : {postInframeUrl}')
return postInframeUrl
def getPostInframeSoup(self):
self.printDevMessage("== getPostInframeSoup execution ==")
if not (self.postInframeUrl == ''):
inframeHtml = requests.get(self.postInframeUrl).text
inframeSoup = BeautifulSoup(inframeHtml, features="html.parser")
self.printDevMessage(f'return is : {len(inframeSoup)} links')
return inframeSoup
else:
raise Exception("[ERROR] getPostInframeSoup가 정상적으로 실행되지 않았습니다.")
def getPostEditorVersion(self):
self.printDevMessage("== getPostEditorVersion execution ==")
for link in self.postInframeSoup.select('div#post_1'):
postEditiorVersion = link.get('data-post-editor-version')
if postEditiorVersion == None:
raise Exception("[ERROR] 지원하지 않는 에디터 버젼입니다.")
self.printDevMessage(f'return is : {postEditiorVersion}')
return postEditiorVersion
def getPostDate(self):
self.printDevMessage("== getPostDate execution ==")
links = self.postInframeSoup.select('span.se_publishDate')
if len(links) == 0:
raise Exception("[ERROR] 포스트 게시일을 찾지 못했습니다.")
else:
for link in links:
publishDate = link.get_text()
if isRelativePostDate(publishDate):
publishDate = getRelativePostDate(publishDate)
self.printDevMessage(f'return is : {publishDate}')
return publishDate
# ============================================================================================
def run(self, dirPath):
self.printDevMessage("== run execution ==")
self.postSetup()
filePath = dirPath + '/' + 'word.md'
ComponentParser.assetPath = dirPath + '/asset'
rawComponents = self.postInframeSoup.select('div.se-component')
try:
with open(filePath, mode='w', encoding='utf-8') as fp:
                # initialize the text data to be written
data = ''
for i, component in enumerate(rawComponents):
if i == 0:
                        # the first component is always the document title from the header
data += ComponentParser(component, isDevMode=self.isDevMode).parsingTitle()
continue
data += ComponentParser(component, skipSticker=self.isDevMode).parsing()
                    # on the last loop, append the hashtags as well
if i == (len(rawComponents) - 1):
                        txt = 'Hashtags : '
for hashTag in ComponentParser.hashTagList:
txt += hashTag
data += ' ' + txt
                # write the file
fp.write(data)
if ComponentParser.errorCounter != 0:
BlogPost.errorCount += 1
            # reset the class variables after backing up the post
ComponentParser.hashTagList = []
ComponentParser.counter = 0
ComponentParser.errorCount = 0
return True
except Exception as e:
print(e)
return False
|
Jeongseup/naver-blog-backer
|
src/naverblogbacker/post.py
|
post.py
|
py
| 4,676 |
python
|
en
|
code
| 4 |
github-code
|
6
|
40315905815
|
##############################################
# Q1 --- find sum of all inputs
##############################################
# Read input - (f.read() for char-by-char read) & (loop file object for LINE-by-LINE reading)
with open('./1201.in', 'r') as f:
freqList = [line.strip() for line in f]
# Compute sum
from functools import reduce
summed = reduce(lambda acc, curr: acc + int(curr), freqList, 0)
# print(summed)
##############################################
# Q2 --- find first cumulative value that repeats
# --- use itertools (efficient looping) : cycle!! because need to REPEAT given input indefinitely
##############################################
from itertools import cycle
cumulative, visited = 0, set()
for n in cycle(freqList):
# print(summed)
visited.add(cumulative)
cumulative += int(n)
# print(cumulative)
if (cumulative in visited):
print(cumulative)
break
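# Tiny illustration of the cycle logic above (example data, not the puzzle
# input): with frequencies [+1, -2, +3, +1] the running sums are
# 1, -1, 2, 3, 4, 2, ... and 2 is the first cumulative value seen twice.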
|
hdd2k/adventOfCode
|
2018/01/1201.py
|
1201.py
|
py
| 924 |
python
|
en
|
code
| 2 |
github-code
|
6
|
38358981811
|
import logging
from enum import Enum
import openai
from fastapi import Query
from openai.error import AuthenticationError, InvalidRequestError, RateLimitError
from tenacity import (
retry,
stop_after_attempt,
wait_random_exponential,
retry_if_exception_type,
) # for exponential backoff
from app.config.messages import (
model_description,
max_tokens_description,
temperature_description,
top_p_description,
presence_penalty_description,
frequency_penalty_description,
)
class Model(Enum):
TEXT_DAVINCI_003 = "text-davinci-003"
@retry(
wait=wait_random_exponential(min=2, max=5),
stop=stop_after_attempt(5),
retry=retry_if_exception_type(RateLimitError),
)
async def completions_with_backoff(**kwargs):
return await openai.Completion.acreate(**kwargs)
async def get_completions(
api_key: str,
message: str,
model: Model = Query(Model.TEXT_DAVINCI_003, description=model_description),
max_tokens: int = Query(2048, description=max_tokens_description),
temperature: float = Query(1, description=temperature_description),
top_p: float = Query(1, description=top_p_description),
presence_penalty: float = Query(0.5, description=presence_penalty_description),
frequency_penalty: float = Query(0.5, description=frequency_penalty_description),
):
openai.api_key = api_key
# https://platform.openai.com/docs/api-reference/completions
try:
result = await completions_with_backoff(
model=model.value,
max_tokens=max_tokens,
temperature=temperature,
top_p=top_p,
presence_penalty=presence_penalty,
frequency_penalty=frequency_penalty,
prompt=message,
request_timeout=60,
)
except AuthenticationError as e:
logging.error(e)
return "The token is invalid."
except InvalidRequestError as e:
logging.error(e)
if "This model's maximum context length is 4097 tokens" in str(e):
return "너무 긴 답변을 유도하셨습니다."
else:
return "오류가 발생했습니다 :sob: 다시 시도해 주세요."
except Exception as e:
logging.exception(e)
return "오류가 발생했습니다 :sob: 다시 시도해 주세요."
try:
return result.get("choices")[0].get("text")
except KeyError as e:
logging.exception(e)
return "오류가 발생했습니다 :sob: 다시 시도해 주세요."
|
jybaek/Hello-ChatGPT
|
app/services/openai_completions.py
|
openai_completions.py
|
py
| 2,522 |
python
|
en
|
code
| 7 |
github-code
|
6
|
31819134582
|
"""
The field should look like this:
col0 col1 col2 col3 col4 col5 col6 col7 col8
||======|======|======||======|======|======||======|======|======||
|| A | A | A || B | B | B || C | C | C ||
row0||cell0 |cell1 |cell2 ||cell3 |cell4 |cell5 ||cell6 |cell7 |cell8 ||
||______|______|______||______|______|______||______|______|______||
|| A | A | A || B | B | B || C | C | C ||
row1||cell9 |cell10|cell11||cell12|cell13|cell14||cell15|cell16|cell17||
||______|______|______||______|______|______||______|______|______||
|| A | A | A || B | B | B || C | C | C ||
row2||cell18|cell19|cell20||cell21|cell22|cell23||cell24|cell25|cell26||
||======|======|======||======|======|======||======|======|======||
|| D | D | D || E | E | E || F | F | F ||
row3||cell27|cell28|cell29||cell30|cell31|cell32||cell33|cell34|cell35||
||______|______|______||______|______|______||______|______|______||
|| D | D | D || E | E | E || F | F | F ||
row4||cell36|cell37|cell38||cell39|cell40|cell41||cell42|cell43|cell44||
||______|______|______||______|______|______||______|______|______||
|| D | D | D || E | E | E || F | F | F ||
row5||cell45|cell46|cell47||cell48|cell49|cell50||cell51|cell52|cell53||
||======|======|======||======|======|======||======|======|======||
|| G | G | G || H | H | H || I | I | I ||
row6||cell54|cell55|cell56||cell57|cell58|cell59||cell60|cell61|cell62||
||______|______|______||______|______|______||______|______|______||
|| G | G | G || H | H | H || I | I | I ||
row7||cell63|cell64|cell65||cell66|cell67|cell68||cell69|cell70|cell71||
||______|______|______||______|______|______||______|______|______||
|| G | G | G || H | H | H || I | I | I ||
row8||cell72|cell73|cell74||cell75|cell76|cell77||cell78|cell79|cell80||
||======|======|======||======|======|======||======|======|======||
"""
import openpyxl
DIGITS = (1, 2, 3, 4, 5, 6, 7, 8, 9)
rows = []
cols = []
squares = []
cells = {}
class Cell:
def __init__(self, row, col, value=''):
self.possible_values = list(DIGITS)
self.value = value
self.isSolved = False
self.row = row
self.col = col
class CellGroup:
def __init__(self):
self.cells = []
self.possible_values = list(DIGITS)
def init_structure():
global rows
global cols
global squares
global cells
# Initialize empty rows, cols and squares
for index in range(0, 9):
rows.append(CellGroup())
cols.append(CellGroup())
squares.append(CellGroup())
# Initialize empty cells
for cell_index in range(0, 81):
cell_name = f'cell{cell_index}'
row_index = cell_index // 9
col_index = cell_index % 9
# Create cell from class
cells[cell_name] = Cell(row_index, col_index)
# Adding it to a row and cols list
rows[row_index].cells.append(cells[cell_name])
cols[col_index].cells.append(cells[cell_name])
# Adding squares
        # Maybe someday something shorter and not that straightforward? (see the note after this block)
if row_index < 3:
if col_index < 3:
squares[0].cells.append(cells[cell_name])
elif 3 <= col_index < 6:
squares[1].cells.append(cells[cell_name])
elif 6 <= col_index < 9:
squares[2].cells.append(cells[cell_name])
elif 3 <= row_index < 6:
if col_index < 3:
squares[3].cells.append(cells[cell_name])
elif 3 <= col_index < 6:
squares[4].cells.append(cells[cell_name])
elif 6 <= col_index < 9:
squares[5].cells.append(cells[cell_name])
elif 6 <= row_index < 9:
if col_index < 3:
squares[6].cells.append(cells[cell_name])
elif 3 <= col_index < 6:
squares[7].cells.append(cells[cell_name])
elif 6 <= col_index < 9:
squares[8].cells.append(cells[cell_name])
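        # A shorter, equivalent square-index formula for the branching above
        # (sketch, not in the original source):
        # square_index = (row_index // 3) * 3 + (col_index // 3),
        # e.g. cell 40 (row 4, col 4) lands in squares[4], matching the diagram.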
def read_puzzle_xls():
"""Read initial know values from Excel"""
global cells
exlw = openpyxl.load_workbook('sudoku.xlsx',
read_only=True,
data_only=True)
ws = exlw.active
for cell_name, cell in cells.items():
excel_cell_row = cell.row+1
excel_cell_column = cell.col+1
if ws.cell(excel_cell_row, excel_cell_column).value is not None:
cell.value = ws.cell(
excel_cell_row, excel_cell_column
).value
cell.possible_values.clear()
def sanity_check():
"""If cell has value - clear possible_values"""
global cells
for cell_name, cell in cells.items():
        if cell.value != '':
cell.possible_values.clear()
def solve_group(group):
for cell in group.cells:
        # Remove known values from the group's possible values
if (cell.value != '') and (cell.value in group.possible_values):
group.possible_values.remove(cell.value)
    # Remove values impossible for the group from each cell's possible values
for cell in group.cells:
for cell_pv in cell.possible_values:
if cell_pv not in group.possible_values:
cell.possible_values.remove(cell_pv)
# Set value if only 1 possible value available
if len(cell.possible_values) == 1:
cell.value = cell.possible_values.pop()
def print_puzzle_debug():
"""Prints puzzle results and debug"""
global cells
global rows
for row in rows:
OutputLine = ''
for cell in row.cells:
OutputLine += str(cell.value) + ' '
print(OutputLine)
def solve_puzzle():
"""Main program to solve the puzzle"""
    # Getting all the cells without a value into a list
unresolved_cells = []
for cell_name, cell in cells.items():
if cell.value == '':
unresolved_cells.append(cell_name)
# Solving only unknown cells
for cell_name in unresolved_cells:
# Solving groups
for group in rows + cols + squares:
solve_group(group)
init_structure()
read_puzzle_xls()
solve_puzzle()
print_puzzle_debug()
|
stanislavstarkov/sudoku
|
sudoku.py
|
sudoku.py
|
py
| 6,469 |
python
|
en
|
code
| 0 |
github-code
|
6
|
34849239284
|
import sqlite3
def execute_with_output(conn, query_txt, fetch_quant="one"):
"""
    Takes the connection object and executes the query text within that connection
    :param fetch_quant: "one" to fetch a single row, anything else to fetch all rows
    :param conn: sqlite3 connection object
    :param query_txt: SQL query string to execute
    :return: the fetched row(s), or None if an error occurred
"""
try:
c = conn.cursor()
c.execute(query_txt)
if fetch_quant == "one":
return c.fetchone()
else:
return c.fetchall()
except sqlite3.Error as e:
print(e)
def execute_no_output(conn, query_txt):
"""
    :param conn: sqlite3 connection object
    :param query_txt: SQL statement to execute (no result rows returned)
    :return: None
"""
try:
c = conn.cursor()
c.execute(query_txt)
except sqlite3.Error as e:
print(e)
|
James-Rocker/data_engineering_portfolio
|
working_with_sqllite/query/__init__.py
|
__init__.py
|
py
| 705 |
python
|
en
|
code
| 0 |
github-code
|
6
|
31564356652
|
import svgwrite.extensions
import faiss
import numpy as np
import matplotlib.pyplot as plt
import svgwrite
import networkx as nx
import src.particle_utils as particle_utils
if __name__ == "__main__":
page_size = (11 * 96, 17 * 96)
max_iterations = 1000
max_particles = 1000
index_training_node_count = 100000
grid_size = 2000
np.random.seed(1234)
center_bias = 0
d = 2
# particle radius
p_radius = 2
gen_radius = 2
# Construct the index
nlist = int(10 * np.sqrt(index_training_node_count))
quantizer = faiss.IndexFlatL2(d) # the other index
index = faiss.IndexIVFFlat(quantizer, d, int(nlist))
initial_vecs = (np.random.uniform(-1*grid_size, grid_size, (123318, 2))).astype('float32')
index.train(initial_vecs)
index.nprobe = 2
# initialize the count of particles
fixed_particles = np.zeros((1, d), dtype='float32')
live_particles = 1
moving_particles = particle_utils.init_moving_particles(live_particles, gen_radius, d)
index.add(fixed_particles)
# begin adding vectors to the index.
a = 1
particle_count = 1
parent_indices = []
i = 0
last_particle_count = 1
last_iteration = 0
while particle_count < max_particles and i < max_iterations:
i += 1
# Increase the number of particles as the bounding circle gets larger
if a*np.sqrt(particle_count)-5 > len(moving_particles):
live_particles = int(np.sqrt(particle_count) * a)
moving_particles = particle_utils.init_moving_particles(live_particles, gen_radius, d)
print(f"Live: {live_particles:4}, Total: {particle_count:6}, on iteration {i:6} particles gained/iterations {(live_particles-last_particle_count)/(i-last_iteration)}")
last_particle_count = live_particles
last_iteration = i
D, I = index.search(moving_particles, 1)
fixing_indices = D[:, 0] < p_radius ** 2
parent_indices.extend(I[fixing_indices])
if any(fixing_indices):
particle_count += sum(fixing_indices)
fixing_particles = moving_particles[fixing_indices]
index.add(fixing_particles)
moving_particles, gen_radius = particle_utils.regenerate_fixed_particle(moving_particles, fixing_indices, gen_radius)
moving_particles += np.random.normal(0, 1, (live_particles, d)).astype('float32')
moving_particles -= moving_particles * center_bias/np.linalg.norm(moving_particles, axis=1, keepdims=True)
moving_particles = particle_utils.regenerate_extreme_particles(moving_particles, gen_radius)
# Reconstruct the points in the order they were added.
index.make_direct_map()
fixed_particles = index.reconstruct_n(0, int(particle_count)) + np.asarray(page_size)/2
parent_indices = np.concatenate(parent_indices)
parents = fixed_particles[parent_indices]
# Build a graph
    G = nx.Graph()
for ind in range(len(fixed_particles)):
G.add_node(ind)
if ind > 0:
G.add_edge(parent_indices[ind-1], ind)
# Iterate over the edges of the graph
    edges = list(nx.edge_dfs(G, source=0))
    # Chain consecutive DFS edges into node sequences so each polyline can be
    # drawn as one continuous stroke.
    grouped_edges = []
for a_edge in edges:
if len(grouped_edges) == 0 or grouped_edges[-1][-1] != a_edge[0]:
grouped_edges.append(list(a_edge))
else:
grouped_edges[-1].append(a_edge[-1])
# Group the nodes together
group_strs = []
paths = []
fig, ax = plt.subplots()
# Write the path
dwg = svgwrite.Drawing("../outputs/out.svg", size=page_size)
inkscape = svgwrite.extensions.Inkscape(dwg)
layer = inkscape.layer()
dwg.add(layer)
for a_group in grouped_edges:
curr_pnts = fixed_particles[a_group].astype('int')
layer.add(svgwrite.shapes.Polyline(curr_pnts.tolist(),
stroke="black",
fill='none'))
dwg.save()
|
neoques/dla-python
|
src/virnolli.py
|
virnolli.py
|
py
| 3,998 |
python
|
en
|
code
| 0 |
github-code
|
6
|
29013781998
|
import struct
import usb.core
USB_TIMEOUT_DEFAULT = 1000
SMS_EP_IN = 0x81
SMS_EP_OUT = 0x02
HIF_TASK = 11
class SMS1180USB:
def __init__(self, dev, timeout=USB_TIMEOUT_DEFAULT):
self.dev = dev
self.timeout = timeout
def usb_read(self):
try:
return bytes(self.dev.read(SMS_EP_IN, 512, self.timeout))
except usb.core.USBTimeoutError:
return None
def usb_write(self, data):
try:
return self.dev.write(SMS_EP_OUT, data, self.timeout)
except usb.core.USBTimeoutError:
return False
def msg_send_req_ex(self, request, src_id, dst_id, flags, payload):
data = struct.pack("<HBBHH", request, src_id, dst_id, len(payload) + 8, flags) + payload
self.usb_write(data)
def msg_send_req(self, request, payload=bytes([])):
return self.msg_send_req_ex(request, 0, HIF_TASK, 0, payload)
#return: response, src_id, dst_id, length, flags, payload
def msg_get_resp_ex(self):
data = self.usb_read()
        if data is None or len(data) < 8:
return None,
else:
response, src_id, dst_id, length, flags = struct.unpack("<HBBHH", data[0:8])
return response, src_id, dst_id, length, flags, data[8:]
#return: response, payload
def msg_get_resp(self):
ret = self.msg_get_resp_ex()
if len(ret) == 1:
return None,
else:
return ret[0], ret[5]
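# Minimal usage sketch (added; the vendor/product IDs and request code below
# are placeholders, not values taken from this file):
# dev = usb.core.find(idVendor=0x1234, idProduct=0x5678)  # hypothetical IDs
# sms = SMS1180USB(dev)
# sms.msg_send_req(0x0001)  # hypothetical request code
# print(sms.msg_get_resp())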
|
fxsheep/helloworld-anyware
|
src/siano/sms1180/sms1180usb.py
|
sms1180usb.py
|
py
| 1,474 |
python
|
en
|
code
| 4 |
github-code
|
6
|
28564753746
|
import sys
import urllib.request
START_MARKERS = {
'title': '<h1 class="header"> <span class="itemprop" itemprop="name">',
'year': '<span class="nobr">(<a href="/year/',
'genres': '<h4 class="inline">Genres:</h4>',
'genre': '> ',
'languages': '<h4 class="inline">Language:</h4>',
'language': "itemprop='url'>"
}
def extract(text, start_marker, end_marker):
return [item.split(end_marker)[0]
for item in text.split(start_marker)[1:]]
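# A quick illustration (added) of what extract() does: split on the start
# marker, drop the leading chunk, and keep everything before the end marker.
assert extract('<b>Hi</b> and <b>Yo</b>', '<b>', '</b>') == ['Hi', 'Yo']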
if __name__ == '__main__':
if len(sys.argv) < 2:
print("Usage: %s imdb_id" % sys.argv[0])
sys.exit(1)
imdb_id = sys.argv[1]
imdb_url = 'http://www.imdb.com/title/tt' + imdb_id
with urllib.request.urlopen(imdb_url) as html_file:
html = html_file.read().decode('utf-8')
title = extract(html, START_MARKERS['title'], end_marker='<')[0]
print(title)
year = extract(html, START_MARKERS['year'], end_marker='/')[0]
print(year)
genres_html = extract(html, START_MARKERS['genres'], end_marker='</div>')[0]
genres = extract(genres_html, START_MARKERS['genre'], end_marker='<')
print(genres)
languages_html = extract(html, START_MARKERS['languages'], end_marker='</div>')[0]
languages = extract(languages_html, START_MARKERS['language'], end_marker='<')
print(languages)
|
PythonAnkara/basicimdb
|
imdb08.py
|
imdb08.py
|
py
| 1,325 |
python
|
en
|
code
| 0 |
github-code
|
6
|
9135098878
|
# -*- coding: utf-8 -*-
import lzma
import os
import shutil
from datetime import datetime
from datetime import timedelta
import hglib
from bugbug import bugzilla
from bugbug import labels
from bugbug import repository
from bugbug_data.secrets import secrets
from cli_common.log import get_logger
from cli_common.taskcluster import get_service
from cli_common.utils import ThreadPoolExecutorResult
logger = get_logger(__name__)
class Retriever(object):
def __init__(self, cache_root, client_id, access_token):
self.cache_root = cache_root
assert os.path.isdir(cache_root), 'Cache root {} is not a dir.'.format(cache_root)
self.repo_dir = os.path.join(cache_root, 'mozilla-central')
self.client_id = client_id
self.access_token = access_token
self.index_service = get_service('index', client_id, access_token)
def retrieve_commits(self):
shared_dir = self.repo_dir + '-shared'
cmd = hglib.util.cmdbuilder('robustcheckout',
'https://hg.mozilla.org/mozilla-central',
self.repo_dir,
purge=True,
sharebase=shared_dir,
networkattempts=7,
branch=b'tip')
cmd.insert(0, hglib.HGPATH)
proc = hglib.util.popen(cmd)
out, err = proc.communicate()
if proc.returncode:
raise hglib.error.CommandError(cmd, proc.returncode, out, err)
logger.info('mozilla-central cloned')
repository.download_commits(self.repo_dir)
logger.info('commit data extracted from repository')
self.compress_file('data/commits.json')
def retrieve_bugs(self):
bugzilla.set_token(secrets[secrets.BUGZILLA_TOKEN])
six_months_ago = datetime.utcnow() - timedelta(182)
    two_years_and_six_months_ago = six_months_ago - timedelta(365 * 2)
logger.info('Downloading bugs from {} to {}'.format(two_years_and_six_months_ago, six_months_ago))
bugzilla.download_bugs_between(two_years_and_six_months_ago, six_months_ago)
logger.info('Downloading labelled bugs')
bug_ids = labels.get_all_bug_ids()
bugzilla.download_bugs(bug_ids)
self.compress_file('data/bugs.json')
def compress_file(self, path):
with open(path, 'rb') as input_f:
with lzma.open('{}.xz'.format(path), 'wb') as output_f:
shutil.copyfileobj(input_f, output_f)
def go(self):
with ThreadPoolExecutorResult(max_workers=2) as executor:
# Thread 1 - Download Bugzilla data.
executor.submit(self.retrieve_bugs)
# Thread 2 - Clone mozilla-central and retrieve commit data.
executor.submit(self.retrieve_commits)
# Index the task in the TaskCluster index.
self.index_service.insertTask(
'project.releng.services.project.{}.bugbug_data.latest'.format(secrets[secrets.APP_CHANNEL]),
{
'taskId': os.environ['TASK_ID'],
'rank': 0,
'data': {},
'expires': (datetime.utcnow() + timedelta(31)).strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
}
)
|
chutten/release-services
|
src/bugbug/data/bugbug_data/retriever.py
|
retriever.py
|
py
| 3,307 |
python
|
en
|
code
| null |
github-code
|
6
|
12733893864
|
commands = [
'!help',
'!done',
'plain',
'bold',
'italic',
'header',
'link',
'inline-code',
'new-line',
'ordered-list',
'unordered-list'
]
outputs = []
def get_formatter():
formatter = input("Choose a formatter: ")
return formatter
def format_new_line():
markdown = '\n'
outputs.append(markdown)
print("".join(outputs))
def format_plain():
markdown = input("Text: ")
outputs.append(markdown)
print("".join(outputs))
def format_bold():
text = input("Text: ")
markdown = "**" + text + "**"
outputs.append(markdown)
print("".join(outputs))
def format_italic():
text = input("Text: ")
markdown = "*" + text + "*"
outputs.append(markdown)
print("".join(outputs))
def format_inline_code():
text = input("Text: ")
markdown = "`" + text + "`"
outputs.append(markdown)
print("".join(outputs))
def format_header():
level = int(input("Level: "))
if not (1 <= level <= 6):
print("The level should be within the range of 1 to 6")
else:
text = input("Text: ")
markdown = ('#' * level) + ' ' + text + '\n'
outputs.append(markdown)
print("".join(outputs))
def format_link():
label = input("Label: ")
url = input("URL: ")
markdown = f'[{label}]({url})'
outputs.append(markdown)
print("".join(outputs))
def format_list(list_type):
row_count = int(input("Number of rows: "))
if row_count <= 0:
print("The number of rows should be greater than zero")
return format_list(list_type)
if list_type == 'ordered-list':
for i in range(1, row_count + 1):
element_i = input(f'Row #{i}: ')
row = f'{i}. ' + element_i + '\n'
outputs.append(row)
elif list_type == 'unordered-list':
for i in range(1, row_count + 1):
element_i = input(f'Row #{i}: ')
row = '* ' + element_i + '\n'
outputs.append(row)
for output in outputs:
print(output, sep='', end='')
print('')
def execute(command):
    # Loop instead of recursing so long sessions cannot hit the recursion limit.
    while True:
        if command == '!help':
            print("""Available formatters: plain bold italic header link inline-code new-line ordered-list unordered-list
Special commands: !help !done""")
        elif command == '!done':
            with open('output.md', 'w') as save:
                for output in outputs:
                    save.write(output)
            break
        elif command == 'plain':
            format_plain()
        elif command == 'bold':
            format_bold()
        elif command == 'italic':
            format_italic()
        elif command == 'inline-code':
            format_inline_code()
        elif command == 'link':
            format_link()
        elif command == 'header':
            format_header()
        elif command == 'new-line':
            format_new_line()
        elif 'list' in command:
            format_list(command)
        else:
            print("Unknown formatting type or command")
        command = get_formatter()
execute(get_formatter())
|
CRowland4/markdown_editor
|
Markdown_Editor.py
|
Markdown_Editor.py
|
py
| 3,661 |
python
|
en
|
code
| 0 |
github-code
|
6
|
72453543549
|
#!/usr/bin/python3
from sensor_msgs.msg import Image
import rospy
import sys
import cv2
from cv_bridge import CvBridge, CvBridgeError
class viewer:
def __init__(self):
self.bridge = CvBridge()
        # RGB and depth streams share one callback; depth frames would need a
        # different encoding (e.g. "passthrough") to convert cleanly.
        self.image_rgb_sub = rospy.Subscriber("/camera/color/image_raw",Image,self.callback)
        self.image_depth_sub = rospy.Subscriber("/camera/depth/image_rect_raw",Image,self.callback)
    def callback(self,data):
        try:
            cv_image = self.bridge.imgmsg_to_cv2(data,"bgr8")
        except CvBridgeError as error:
            print(error)
            return  # cv_image is undefined if the conversion failed
        cv2.imshow("ball + depth",cv_image)
        cv2.waitKey(30)
def main(args):
    # init_node must run before any subscribers are created
    rospy.init_node("image_pub",anonymous=True)
    v = viewer()
    rospy.loginfo('image_pub node started')
try:
rospy.spin()
except KeyboardInterrupt:
print("Shutting down")
cv2.destroyAllWindows()
if __name__ == '__main__':
try:
main(sys.argv)
except rospy.ROSInterruptException:
pass
|
Yandong-Luo/hybrid
|
src/Vision/detect_ball/nodes/image_pub.py
|
image_pub.py
|
py
| 997 |
python
|
en
|
code
| 1 |
github-code
|
6
|
7615725014
|
import math
class Circle:
def __init__(self, centre, radius):
self.centre = centre
self.radius = radius
    def isInside(self, rectangle):
        """Return True if the rectangle's centre lies inside the circle."""
        x = (rectangle.rightLower.x + rectangle.leftUpper.x) / 2
        y = (rectangle.leftUpper.y + rectangle.rightLower.y) / 2
        distance = math.sqrt(math.pow(x - self.centre.x, 2) + math.pow(y - self.centre.y, 2))
        return distance <= self.radius
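# Minimal usage sketch (added): any objects exposing .x/.y and
# .leftUpper/.rightLower attributes work, e.g. namedtuples.
if __name__ == '__main__':
    from collections import namedtuple
    Point = namedtuple('Point', 'x y')
    Rect = namedtuple('Rect', 'leftUpper rightLower')
    c = Circle(Point(0, 0), 5)
    print(c.isInside(Rect(Point(-1, 1), Point(1, -1))))  # True: rect centre is at the origin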
|
Tariod/what-is-near
|
lib/Circle.py
|
Circle.py
|
py
| 410 |
python
|
en
|
code
| 0 |
github-code
|
6
|
18528705421
|
# Section12-1
# Python database integration (SQLite)
# Creating a table and inserting rows
import datetime
import sqlite3
# Generate the insertion timestamp
now = datetime.datetime.now()
print('now', now)
nowDatetime = now.strftime('%Y-%m-%d %H:%M:%S')
print('now Datetime', nowDatetime)
# sqlite3 versions
print('sqlite3.version : ', sqlite3.version)
print('sqlite3.sqlite_version', sqlite3.sqlite_version)
print()
# Create DB & Autocommit & Rollback
# Commit : applies pending changes to the DB
# Autocommit : applies every change to the DB immediately
# Rollback : reverts pending changes
# Path to your own DB file
conn = sqlite3.connect('database/database.db', isolation_level=None)
# Create the DB in memory
# conn = sqlite3.connect(":memory:")
# Attach a cursor
c = conn.cursor()
print('Cursor Type : ', type(c))
# Create a table (data types: TEXT NUMERIC INTEGER REAL BLOB)
c.execute(
"CREATE TABLE IF NOT EXISTS users(id INTEGER PRIMARY KEY, username text, email text, phone text, website text, regdate text)") # AUTOINCREMENT
# Insert rows
c.execute("INSERT INTO users VALUES (1 ,'Kim','[email protected]', '010-0000-0000', 'Kim.com', ?)", (nowDatetime,))
c.execute("INSERT INTO users(id, username, email, phone, website, regdate) VALUES (?, ?, ?, ?, ?, ?)",
(2, 'Park', '[email protected]', '010-1111-1111', 'Park.com', nowDatetime))
# Bulk insert (tuple or list of rows)
userList = (
(3, 'Lee', '[email protected]', '010-2222-2222', 'Lee.com', nowDatetime),
(4, 'Cho', '[email protected]', '010-3333-3333', 'Cho.com', nowDatetime),
(5, 'Yoo', '[email protected]', '010-4444-4444', 'Yoo.com', nowDatetime)
)
c.executemany(
"INSERT INTO users(id, username, email, phone, website, regdate) VALUES (?, ?, ?, ?, ?, ?)", userList)
print()
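# (Added sketch) The inserted rows could be verified here, before the delete
# below wipes them:
# c.execute("SELECT * FROM users")
# print(c.fetchall())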
# Delete all rows from the table
print("users db deleted :", conn.execute("delete from users").rowcount, "rows")
# Commit : with isolation_level=None every statement is auto-committed
# conn.commit()  # needed to apply changes when autocommit is off
# Rollback
# conn.rollback()  # cannot be used while autocommit is on
# Close the connection
conn.close()
|
dailyco/python-study
|
src/section12_1.py
|
section12_1.py
|
py
| 2,122 |
python
|
ko
|
code
| 2 |
github-code
|
6
|
19981883727
|
import time
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
import datetime
import csv
import json
def get_data():
current_time = datetime.datetime.now().strftime('%m-%d-%Y')
with open(f'data/{current_time}_labirint.csv', 'w', newline='', encoding='utf-8-sig') as file:
writer = csv.writer(file, delimiter=';')
writer.writerow(
[
                'Book title',
                'Author',
                'Publisher',
                'Price without discount',
                'Discounted price',
                'Discount percentage',
                'Stock availability'
]
)
ua = UserAgent()
headers = {
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,'
'application/signed-exchange;v=b3;q=0.9',
'User-Agent': ua.random,
}
url = 'https://www.labirint.ru/genres/2498/?display=table&available=1'
response = requests.get(url=url, headers=headers)
soup = BeautifulSoup(response.text, 'lxml')
pages_count = int(soup.find('div', class_='pagination-numbers').find_all('a')[-1].text.strip())
books_data = []
for page in range(1,pages_count + 1):
url = f'https://www.labirint.ru/genres/2498/?display=table&available=1&page={page}'
response = requests.get(url=url, headers=headers)
soup = BeautifulSoup(response.text, 'lxml')
books = soup.find('tbody', class_='products-table__body').find_all('tr')
for book in books:
book_data = book.find_all('td')
try:
book_title = book_data[0].find('a').text
if not book_title:
continue
except AttributeError:
continue
try:
book_author = ', '.join(list(map(lambda link: link.text, book_data[1].find_all('a'))))
except AttributeError:
                book_author = 'No author'
try:
book_publisher = ': '.join(list(map(lambda publisher: publisher.text, book_data[2].find_all('a'))))
except AttributeError:
                book_publisher = 'No publisher'
try:
old_price = int(book_data[3].find(class_='price-gray').text.replace('₽', '').replace(' ', '').strip())
except AttributeError:
                old_price = 'No old price'
try:
new_price = int(book_data[3].find(class_='price-val').text.replace('₽', '').replace(' ', '').strip())
except AttributeError:
                new_price = 'No new price'
try:
discount = f'{round(((old_price - new_price) / old_price) * 100, 2)} %'
except TypeError:
                discount = 'No discount'
try:
availability = book_data[-1].find(class_='mt3 rang-available').text.replace(' ', '').strip()
except AttributeError:
                availability = 'No data'
books_data.append(
{
'book_title': book_title,
'book_author': book_author,
'book_publisher': book_publisher,
'old_price': f'{old_price}₽' if type(old_price) is int else old_price,
'new_price': f'{new_price}₽' if type(new_price) is int else new_price,
'discount': discount,
'availability': availability,
}
)
with open(f'data/{current_time}_labirint.csv', 'a', newline='', encoding='utf-8-sig') as file:
writer = csv.writer(file, delimiter=';')
writer.writerow(
[
book_title,
book_author,
book_publisher,
f'{old_price}₽' if type(old_price) is int else old_price,
f'{new_price}₽' if type(new_price) is int else new_price,
discount,
availability
]
)
        print(f'Processed {page}/{pages_count} pages')
with open(f'data/{current_time}-labirint.json', 'w', encoding='utf-8') as file:
json.dump(books_data, file, indent=4, ensure_ascii=False)
def main():
start_time = time.time()
get_data()
diff_time = time.time() - start_time
    print(f'Script running time - {diff_time}')
if __name__ == '__main__':
main()
|
Baradys/scrappers
|
scrappers/labirint/labirint.py
|
labirint.py
|
py
| 4,771 |
python
|
en
|
code
| 0 |
github-code
|
6
|
37301787598
|
import random
n=int(input("n="))
mas=random.sample(range(-100,100),n)
print(mas)
print("Мінімальний від'ємний елемент",min(mas))
S=0
for i in mas:
if i<0:
S=S+i
print("Сума від'ємних елементів масиву=",S)
k=0
for i in mas:
if i>0:
k=k+1
print("Кількість додатніх елементів масиву= ",k)
for i in range (len(mas)):
if mas[i]>0:
print("Додатній елемент масиву = ",mas[i])
|
oly17/-
|
лб 1 30 варыант/3.py
|
3.py
|
py
| 550 |
python
|
uk
|
code
| 0 |
github-code
|
6
|
27214244555
|
# Started 1:57, finished 12:18
def solution(N, stages):
answer = []
    user = [0] * (N + 2)  # players who reached each stage
    fail = [0] * (N + 2)  # players currently stuck on each stage
    fail_rate = []  # failure rate per stage
for s in stages:
for i in range(1, s + 1):
user[i] += 1
if i == s:
fail[i] += 1
for i in range(N):
if user[i + 1] == 0:
fail_rate.append((0, i + 1))
else:
fail_rate.append((fail[i + 1] / user[i + 1], i + 1))
fail_rate.sort(key=lambda x: (-x[0], x[1]))
for f in fail_rate:
answer.append(f[1])
return answer
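# Quick check (added) using the widely cited sample case for this problem:
assert solution(5, [2, 1, 2, 6, 2, 4, 3, 3]) == [3, 4, 2, 1, 5]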
|
hammii/Algorithm
|
python_practice/정렬/실패율.py
|
실패율.py
|
py
| 657 |
python
|
ko
|
code
| 2 |
github-code
|
6
|
31357541271
|
import argparse, sys
import tornado.ioloop
import tornado.gen
import time
from nats.io.client import Client as NATS
def show_usage():
print("nats-sub SUBJECT [-s SERVER] [-q QUEUE]")
def show_usage_and_die():
show_usage()
sys.exit(1)
@tornado.gen.coroutine
def main():
# Parse the command line arguments
parser = argparse.ArgumentParser()
# e.g. nats-sub hello -s nats://127.0.0.1:4222
parser.add_argument('subject', default='hello', nargs='?')
parser.add_argument('-s', '--servers', default=[], action='append')
parser.add_argument('-q', '--queue', default="")
# Parse!
args = parser.parse_args()
# Create client and connect to server
nc = NATS()
servers = args.servers
if len(args.servers) < 1:
servers = ["nats://127.0.0.1:4222"]
opts = {"servers": servers}
yield nc.connect(**opts)
@tornado.gen.coroutine
def handler(msg):
print("[Received: {0}] {1}".format(msg.subject, msg.data))
print("Subscribed to '{0}'".format(args.subject))
yield nc.subscribe(args.subject, args.queue, handler)
if __name__ == '__main__':
main()
tornado.ioloop.IOLoop.current().start()
|
nats-io/nats.py2
|
examples/nats-sub/__main__.py
|
__main__.py
|
py
| 1,186 |
python
|
en
|
code
| 62 |
github-code
|
6
|
8366434140
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
import os
def get_input_file(filepath, filename):
    """
    Read content from standard input and store it in a file
    :param filepath: directory to store the file in
    :param filename: file name
    :return:
    """
    # Open the file for read/write, creating it if it does not exist
    file01 = open(filepath + filename, 'w+')
    # Read from standard input in a loop until "end" is entered
    print("Current file position:", file01.tell())
    in_str = input("Enter content: ")
while in_str != "end":
file01.write(in_str + "\n")
in_str = input()
    # Flush the buffer to the file
    file01.flush()
    print("Current file position:", file01.tell())
    print("Current file:", file01.name, ", open mode:", file01.mode, ", closed:", file01.closed)
    # Close the file
    if not file01.closed:
        file01.close()
    print("Current file:", file01.name, ", open mode:", file01.mode, ", closed:", file01.closed)
return
def get_file_details(filepath, filename):
"""
读取指定文件内容
:param filepath:文件存储路径
:param filename:文件名
:return:
"""
file01 = open(filepath + filename, 'r+')
print("文件:", filepath + filename, "的内容是:\n")
for line in file01.readlines():
print(line, end="")
if not file01.closed:
file01.close()
return
def copy_file_func(srcfile01, srcfile02):
"""
复制文件,将文件filename01的内容复制到filename02中。
如果filename01不存在,返回提示结果。
:param srcfile01:
:param srcfile02:
:return:
"""
if os.path.exists(srcfile01):
file01 = open(srcfile01, 'r+')
file02 = open(srcfile02, 'w+')
for line in file01.readlines():
file02.write(line)
if not file01.closed:
file01.close()
if not file02.closed:
file02.close()
else:
print("文件:", srcfile01, " 不存在!")
return
def file_merge_func(srcfile01, srcfile02, tarfile):
"""
合并文件srcfile01和srcfile02,将结果存入tarfile
:param srcfile01: 来源文件01
:param srcfile02: 来源文件02
:param tarfile: 目标文件
:return:
"""
    if os.path.exists(srcfile01) and os.path.exists(srcfile02):
        file01 = open(srcfile01, 'r+')
        file02 = open(srcfile02, 'r+')
        file03 = open(tarfile, 'w+')
        for line in file01.readlines():
            file03.write(line)
        for line in file02.readlines():
            file03.write(line)
        # Close the handles inside this branch only: they are never opened
        # when either source file is missing.
        if not file01.closed:
            file01.close()
        if not file02.closed:
            file02.close()
        if not file03.closed:
            file03.close()
    elif not os.path.exists(srcfile01):
        print("File:", srcfile01, " does not exist!")
    else:
        print("File:", srcfile02, " does not exist!")
    return
def rename_file_func(oldfilename, newfilename):
"""
修改文件名
:param oldfilename:
:param newfilename:
:return:
"""
if os.path.exists(oldfilename):
os.rename(oldfilename, newfilename)
return
def remove_file_func(filename):
if os.path.exists(filename):
os.remove(filename)
return
def path_change_func(basepath):
"""
目录创建和目录变更的练习
:param basepath:
:return:
"""
if not os.path.exists(basepath + "\\a01filetest"):
os.mkdir(basepath + "\\a01filetest")
print(os.getcwd())
os.chdir(basepath + "\\a01filetest")
print(os.getcwd())
return
def file_join_func():
"""
实现文件的关联
可以指定文件,指定管理键
:return:
"""
base_path = "D:\\02helloWorld\\03Python\\a01pythonLearn\\file\\"
user_grade = {}
user_student = {}
user_list = []
with open(base_path + "p086_grade.txt", "r+", encoding="utf-8") as fin_grade, \
open(base_path + "p086_student.txt", "r+", encoding="utf-8") as fin_student, \
open(base_path + "p086.txt", "w+", encoding="utf-8") as fout:
try:
for x in fin_grade.readlines():
user_id, grade = x.strip().split(",")
user_grade[user_id] = grade
for x in fin_student.readlines():
user_id, user_name = x.strip().split(",")
user_student[user_id] = user_name
if user_id in user_grade:
user = [user_id, user_name, user_grade[user_id]]
else:
                    user = [user_id, user_name, "no grade matched"]
user_list.append(user)
for user in user_list:
user.append("\n")
fout.write(",".join(user))
except BaseException as be:
print(f"异常: {be}")
print(user_grade)
print(user_student)
print(user_list)
return
# Root directory for file storage
base_path = "/file\\"
# get_input_file(base_path, "inputFile.txt")
# get_file_details(base_path, "inputFile.txt")
# copy_file_func(base_path + "file02.txt", base_path + "file03.txt")
# file_merge_func(base_path + "file02.txt", base_path + "file03.txt", base_path + "file04.txt")
# rename_file_func(base_path + "file04.txt", base_path + "file05.txt")
# copy_file_func(base_path + "file05.txt", base_path + "file04.txt")
# remove_file_func(base_path + "file05.txt")
# path_change_func(base_path)
file_join_func()
|
renxiaowei-1991/pythonLearn
|
a01PythonLearn/package/b02BaseModule/c02FileIOType.py
|
c02FileIOType.py
|
py
| 5,515 |
python
|
en
|
code
| 0 |
github-code
|
6
|
37176772654
|
# Implement the RLE algorithm: build a compression module and a decompression module.
# The input and output data live in separate text files.
with open('file_5.4.txt', 'r') as data:
text = data.readline()
# print(text)
def rle_coding(text):
rle_text = ''
    count = 1
char = text[0]
for i in range(1, len(text)):
if text[i] == char:
            count += 1
else:
rle_text += str(count) + char
char = text[i]
count = 1
rle_text += str(count) + char
return rle_text
def rle_decoding(text):
    new_text = ''
    i = 0
    while i < len(text):
        # A run length can span several digits (runs of 10+), so read all
        # leading digits before the run character.
        num = ''
        while text[i].isdigit():
            num += text[i]
            i += 1
        new_text += text[i] * int(num)
        i += 1
    return new_text
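# Round-trip check (added): 'aaabb' encodes to '3a2b' and decodes back.
assert rle_coding('aaabb') == '3a2b'
assert rle_decoding('3a2b') == 'aaabb'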
coding_text = rle_coding(text)
# print(coding_text)
decoding_text = rle_decoding(coding_text)
# print(decoding_text)
with open('file_5.4_coding.txt', 'w', encoding='utf-8') as data:
    data.write(coding_text)
with open('file_5.4_decoding.txt', 'w', encoding='utf-8') as data:
    data.write(decoding_text)
|
Svetabtr/Homework_Python
|
hometask_5.4.py
|
hometask_5.4.py
|
py
| 1,192 |
python
|
en
|
code
| 0 |
github-code
|
6
|
9259336526
|
import numpy as np
from calculateZ import WeightCrossData, AddBiasToProduct
from output import Softmax_activation
from activationFunctions import Tanh,Relu,Sigmoid
class FeedForwardNeuralNetwork:
def __init__(self,S):
self.biases = []
self.Weights = []
for i in range(len(S)-1):
self.Weights.append(np.random.randn(S[i], S[i+1]) / np.sqrt(S[i]))
self.biases.append(np.random.randn(S[i+1]).reshape(1, S[i+1]))
    def _activation(self, activFun):
        """Map an activation name to its function object (Relu by default)."""
        if activFun == "tanh":
            return Tanh()
        elif activFun == "sigmoid":
            return Sigmoid()
        return Relu()
    def findLoss(self, X, y, activFun):
        dotProduct = WeightCrossData()
        adddBias = AddBiasToProduct()
        activationFunctions = self._activation(activFun)
softmax_activation = Softmax_activation()
inputTaken = X
for i in range(len(self.Weights)):
dp = dotProduct.prop_forward(self.Weights[i], inputTaken)
zobt = adddBias.prop_forward(dp, self.biases[i])
inputTaken = activationFunctions.prop_forward(zobt)
return softmax_activation.calculate_loss(inputTaken, y)
def predict(self, X,activFun):
dotProduct = WeightCrossData()
adddBias = AddBiasToProduct()
if(activFun == "tanh"):
activationFunctions = Tanh()
elif(activFun == "sigmoid"):
activationFunctions = Sigmoid()
else:
activationFunctions = Relu()
softmax_activation = Softmax_activation()
inputTaken = X
for i in range(len(self.Weights)):
dp = dotProduct.prop_forward(self.Weights[i], inputTaken)
zobt = adddBias.prop_forward(dp, self.biases[i])
inputTaken = activationFunctions.prop_forward(zobt)
return np.argmax(softmax_activation.predict(inputTaken), axis=1)
def train(self, X, y, activFun , max_passes=20000, epsilon=0.01, reg_lambda=0.01, loss_printYN=False):
        dotProduct = WeightCrossData()
        adddBias = AddBiasToProduct()
        activationFunctions = self._activation(activFun)
softmax_activation = Softmax_activation()
for iter in range(max_passes):
# perform forward propagation
inputTaken = X
prop_forward = [(None, None, inputTaken)]
for i in range(len(self.Weights)):
dp = dotProduct.prop_forward(self.Weights[i], inputTaken)
zobt = adddBias.prop_forward(dp, self.biases[i])
inputTaken = activationFunctions.prop_forward(zobt)
prop_forward.append((dp, zobt, inputTaken))
# Back propagation
dactivFunc = softmax_activation.calculate_diff(prop_forward[len(prop_forward)-1][2], y)
for n in range(len(prop_forward)-1, 0, -1):
add_descent = activationFunctions.prop_backward(prop_forward[n][1], dactivFunc)
bias_descent, mul_descent = adddBias.prop_backward(prop_forward[n][0], self.biases[n-1], add_descent)
weight_descnt, dactivFunc = dotProduct.prop_backward(self.Weights[n-1], prop_forward[n-1][2], mul_descent)
weight_descnt += reg_lambda * self.Weights[n-1]
self.biases[n-1] += -epsilon * bias_descent
self.Weights[n-1] += -epsilon * weight_descnt
if loss_printYN and iter % 1000 == 0:
print("Loss after iteration %i: %f" %(iter, self.findLoss(X, y,activFun)))
        return self.Weights, self.biases
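# A tiny smoke-test sketch (added; assumes the helper modules imported above
# expose the interfaces this class relies on):
# if __name__ == "__main__":
#     X_demo = np.random.randn(32, 3)
#     y_demo = np.random.randint(0, 2, 32)
#     net = FeedForwardNeuralNetwork([3, 8, 2])
#     net.train(X_demo, y_demo, "tanh", max_passes=2000, loss_printYN=True)
#     print("train accuracy:", np.mean(net.predict(X_demo, "tanh") == y_demo))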
|
nancyagrwal/Machine-Learning
|
Feed FOrward NN/feedForwardNeuralNetwork.py
|
feedForwardNeuralNetwork.py
|
py
| 3,721 |
python
|
en
|
code
| 0 |
github-code
|
6
|
74182195707
|
class Queue:
    def __init__(self, items=None):
        self.time = 0
        if items is None:
            self.items = []
        else:
            self.items = items
def dequeue(self):
if not self.isEmpty():
return self.items.pop(0)
else:
return -1
def enqueue(self, item):
self.items.append(item)
def isEmpty(self):
return len(self.items) == 0
def size(self):
return len(self.items)
def timeReset(self):
self.time = 0
def Time(self):
return self.time
def __str__(self) -> str:
return str(self.items)
def cashier():
if y.size() + 1 < 6:
y.enqueue(x.dequeue())
elif z.size() + 1 < 6:
z.enqueue(x.dequeue())
if not y.isEmpty():
y.time += 1
if not z.isEmpty():
z.time += 1
x = Queue()
y = Queue()
z = Queue()
inp = [*input("Enter people : ")]
x.items = inp
for i in range(1, len(inp)+1):
if not y.isEmpty() and y.time == 3:
y.dequeue()
y.timeReset()
if not z.isEmpty() and z.time == 2:
z.dequeue()
z.timeReset()
cashier()
print(i,x,y,z)
|
chollsak/KMITL-Object-Oriented-Data-Structures-2D
|
Queue/exercise2.py
|
exercise2.py
|
py
| 1,191 |
python
|
en
|
code
| 0 |
github-code
|
6
|
30433642050
|
import requests
from jSona import jSona
import pprint
pp = pprint.pprint
class proImagery :
def __init__(self, CONF_PATH) :
self.jso = jSona()
self.headers = {'json':{'Content-Type':'application/json; charset=utf-8'}}
self.config = self.jso.loadJson(CONF_PATH)['IMAGERY']
self.lower = {color['id']:color for color in self.jso.loadJson(self.config['COLOR']['PATH'])}
def post(self, data) :
return requests.post(url=self.post_url, data=self.jso.dumps(data, False), headers=self.headers['json'])
def connect(self, addr, port) :
self.url = "{}:{}/".format(addr, port)
self.post_url = self.url
temp_data = {'hello':'world'}
res = self.post(temp_data)
return 'success' in res.content.decode()
def segcolor(self, img_url, img_name='default.jpg', options='-d') :
self.post_url = self.url+'segcolor'
data = {'path' : img_url, 'name' : img_name, 'options' : options}
res = self.post(data)
return self.jso.loads(res.content.decode(), False)
def ambcolor(self, colors, threshold=0.1) :
colors = {color[0]:color[1] for color in colors}
ambics = dict()
for cid in colors : # brightgrayyellow
if cid in self.lower :
new_cid = self.lower[cid]['u']
if new_cid in ambics : ambics[new_cid] += colors[cid]
else : ambics[new_cid] = colors[cid]
else :
if cid in ambics : ambics[cid] += colors[cid]
else : ambics[cid] = colors[cid]
return list(filter(lambda c : c[1]>threshold, ambics.items()))
def start(self, img_url, labels=['shirt'], ambi=True, threshold=0.1, img_name='default.jpg', options='-d') :
segments_and_colors = self.segcolor(img_url, img_name=img_name, options=options)
if type(segments_and_colors) == type([]) :
segments, colors = segments_and_colors[1], segments_and_colors[3]
for sinx in range(len(segments)) :
if set(labels)&set(self.config['LABEL'][''.join(segments[sinx].split()[:-1])]) and sinx<len(colors):
if ambi : return self.ambcolor(colors[sinx], threshold=threshold)
else : return colors[sinx]
return None
|
oimq/proCleaner
|
proCleaner/proImagery.py
|
proImagery.py
|
py
| 2,349 |
python
|
en
|
code
| 0 |
github-code
|
6
|
39669765080
|
import hashlib
import os.path
import sys
import pickle
from SuperStoreParser import Parser as ssParser
from SheetsController import SheetsController
def main():
# Ensure usage is correct
if (len(sys.argv) != 2):
print("usage: python3 FoodParser.py inputfile.txt")
return
if (not os.path.exists(sys.argv[1])):
print("Could not find file \"" + sys.argv[1] + "\"")
return
# Set up configuration file paths
sheetId = ""
    with open('secret/sheetId.txt') as sheet_file:
        sheetId = sheet_file.read().strip()
tokenPath = 'secret/token.pickle'
credPath = 'secret/credentials.json'
print("Building sheets controller...")
# Build sheetsController
sheet = SheetsController(tokenPath, credPath, sheetId)
# Build list of headers
headers = [
'Food',
'MFP Name',
'Verified',
'Link',
'Unit',
'Buying Quantity',
'Buying Cost',
'Serving Quantity',
'$ / unit',
'Serving Calories',
'Serving Fat',
'Serving Carbs',
'Serving Protein',
'g Protein / Calorie',
'g Protein / $',
'Protein Score',
'Normalized Protein Score',
'g Carbs / $',
'g Fat / $',
'Calories / $'
]
# Clear out sheet
# sheet.clearSheet()
# Fill in headers
# sheet.makeHeaders(headers)
# Get links already in table
print('Getting already populated urls...')
doneUrls = sheet.getUrls()
print('Getting urls from file...')
urls = []
with open(sys.argv[1]) as urlFile:
urls = [i.strip() for i in urlFile.readlines()]
print('Filtering out already populated urls...')
urls = [i for i in urls if i not in doneUrls]
print(str(len(urls)) + " new urls.")
# Build superstore parser
print("Building SuperStoreParser...")
ssp = ssParser()
caching = True
debugging = True
data = []
print(len(urls))
for i in range(len(urls)):
print('yoinking page...(' + str(i+1) + '/'
+ str(len(urls)) + ')')
url = urls[i]
html = None
if caching:
cachePath = 'cache/'
cachePath += hashlib.md5(url.encode()).hexdigest()
cachePath += '.pickle'
if(not os.path.exists(cachePath)):
html = ssp.gethtml(url)
with open(cachePath, 'wb') as cache:
pickle.dump(html, cache)
else:
with open(cachePath, 'rb') as cache:
html = pickle.load(cache)
else:
html = ssp.gethtml(url)
data.append((html,url))
ssp.closeWindow()
parsedData = []
for i in range(len(data)):
print('parsing data...(' + str(i+1) + '/' + str(len(data)) + ')')
pair = data[i]
parsed = None
if caching:
cachePath = 'cache/'
cachePath += hashlib.md5(pair[1].encode()).hexdigest()
cachePath += '.parsed.pickle'
if not os.path.exists(cachePath):
parsed = ssp.parse(pair[0], pair[1])
with open(cachePath, 'wb') as cache:
pickle.dump(parsed, cache)
else:
with open(cachePath, 'rb') as cache:
parsed = pickle.load(cache)
else:
parsed = ssp.parse(pair[0], pair[1])
parsedData.append(parsed)
print('Writing to sheets...')
# Insert items
sheet.insertFoods(parsedData)
if __name__ == '__main__':
main()
|
haondt/531fitness
|
foodparser/FoodParser.py
|
FoodParser.py
|
py
| 2,981 |
python
|
en
|
code
| 0 |
github-code
|
6
|
13116301687
|
l=[]
r=int(input('enter no.of rows: '))
c=int(input('enter no.of columns: '))
for i in range(r):
row=[]
for j in range(c):
elem=int(input('Element'+str(i)+','+str(j)+':'))
row.append(elem)
l.append(row)
print('list: ')
print('l=[')
for i in range(r):
print('\t[',end='')
for j in range(c):
print(l[i][j],end=' ')
print(']')
print(' ]')
|
ParthivSen/Python_Programming
|
2D list.py
|
2D list.py
|
py
| 402 |
python
|
en
|
code
| 0 |
github-code
|
6
|
33340636168
|
from wsgiref.simple_server import make_server
from pyramid.view import view_config
from pyramid.config import Configurator
@view_config(route_name='theroute', renderer='json',request_method='POST')
def myview(request):
import pdb; pdb.set_trace()
return {'POST':''}
if __name__ == '__main__':
config = Configurator()
config.add_route('theroute', '/')
config.scan()
app = config.make_wsgi_app()
server = make_server('0.0.0.0', 6543, app)
print(server.base_environ)
server.serve_forever()
|
Yatish04/MicrosoftBackend
|
testscripts/pyramidserver.py
|
pyramidserver.py
|
py
| 524 |
python
|
en
|
code
| 0 |
github-code
|
6
|
24991083298
|
from tkinter import *
root = Tk()
root.title('ERIC PY')
root.geometry("500x400")
def grab():
my_label.config(text=my_spin.get())
names = ("j","T","M","N")
# my_spin = Spinbox(root, from_=0, to=10, increment=2, font=("helvetica",20))
# my_spin = Spinbox(root, values=("j","T","M","N") ,font=("helvetica",20))
my_spin = Spinbox(root, values=names ,font=("helvetica",20))
my_spin.pack(pady=20)
my_button = Button(root, text="submit", command=grab)
my_button.pack(pady=20)
my_label = Label(root, text="")
my_label.pack(pady=20)
root.mainloop()
|
miraceti/tkinter
|
gui_98tk_Spinboxes.py
|
gui_98tk_Spinboxes.py
|
py
| 550 |
python
|
en
|
code
| 2 |
github-code
|
6
|
75187431228
|
from django.conf import settings
import jwt
from rest_framework import authentication, exceptions
from django.contrib.auth.models import User
class JWTAuthentication(authentication.BaseAuthentication):
def authenticate(self, request):
auth_data = authentication.get_authorization_header(request)
if not auth_data:
return None
prefix, token = auth_data.decode('utf-8').split(' ')
try:
payload = jwt.decode(token, settings.JWT_SECRET, algorithms=['HS256'])
user = User.objects.get(id=payload['id'])
return (user, token)
except jwt.DecodeError:
raise exceptions.AuthenticationFailed('Invalid token')
except jwt.ExpiredSignatureError:
raise exceptions.AuthenticationFailed('Expired token')
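# Wiring sketch (added; the dotted path assumes this file lives at
# authentication/backends.py):
# in settings.py:
#   JWT_SECRET = 'change-me'
#   REST_FRAMEWORK = {
#       'DEFAULT_AUTHENTICATION_CLASSES': [
#           'authentication.backends.JWTAuthentication',
#       ],
#   }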
|
Limookiplimo/Contacts-API
|
authentication/backends.py
|
backends.py
|
py
| 835 |
python
|
en
|
code
| 1 |
github-code
|
6
|
24458342362
|
import pandas as pd
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
# Assumes rs_train (the training DataFrame) is already loaded in scope.
X = rs_train[['price_per_ticket', 'seat_row', 'paid_full_price', 'paid_online',
'Regular_Ticket', 'age', 'est_income', 'Male', 'married', 'fam_w_kids',
'kids_in_house', 'from_Boston', 'from_MA','game_hour',
'RS_v_Yankees', 'STH_Act', 'BUSINESS_STH_Act',
'GROUP_Act', 'BUSINESS_act', 'Individual_Act', 'SPONSOR_Act',
'EMPLOYEE_Act', 'April', 'May', 'June', 'July', 'August', 'September',
'Thursday', 'Sunday', 'Tuesday', 'Wednesday', 'Saturday', 'Friday',
'Monday', 'low_scale_seat', 'med_scale_seat', 'high_scale_seat']]
y = rs_train['ticket_used']
names = pd.DataFrame(X.columns)
model_fs = SelectKBest(score_func = chi2, k=4)  # chi2 requires non-negative features
fs_results = model_fs.fit(X,y)
#print(fs_results.scores_)
results_df = pd.DataFrame(fs_results.scores_)
scored= pd.concat([names, results_df], axis=1)
scored.columns = ["Feature", "Score"]
scored.sort_values(by=["Score"])
|
befitz/ISOM837_RS
|
feature_selection.py
|
feature_selection.py
|
py
| 996 |
python
|
en
|
code
| 0 |
github-code
|
6
|
6183714269
|
# coding:utf-8
# @Time : 2023/10/27 19:37
# @Author : 郑攀
# @File : filter.py
# @Software : PyCharm
import csv
filter_feature = []
with open("Output/filter feature.csv", "r", encoding='utf8') as f: # 打开文件
lines = csv.reader(f)
for line in lines:
filter_feature.append(line[0])
seed_feature = []
with open("Output/feature triples_ls.csv", "r", encoding='utf8') as f: # 打开文件
lines = csv.reader(f)
for line in lines:
seed_feature.append(line)
fp = open('Output/feature triples_filter.csv', "w+", encoding='utf8', newline='')
write = csv.writer(fp)
number = 0
for i in range(len(seed_feature)):
if seed_feature[i][0] + seed_feature[i][1] in filter_feature:
write.writerow(seed_feature[i])
number += 1
print(number)
|
PanZheng-2021/2022.0333_11.8
|
src/Review Feature Extraction/filter.py
|
filter.py
|
py
| 791 |
python
|
en
|
code
| 0 |
github-code
|
6
|
72248624827
|
class TreeNode:
def __init__(self,data):
self.data = data
self.left = None
self.right = None
class BST:
def __init__(self):
self.root = None
#def
def isEmpty(self):
return self.root is None
#def
def clear(self):
        self.root = None  # must match isEmpty()'s "is None" check
#def
    def searchX(self,x):
        curr = self.root
        while curr is not None:
            if x == curr.data:
                return True
            elif x < curr.data:
                if curr.left is not None:
                    curr = curr.left
                else:
                    return False
            else:
                if curr.right is not None:
                    curr = curr.right
                else:
                    return False
        return False
#def
def insertX(self,x):
new_node = TreeNode(x)
if self.root is None:
self.root = new_node
else:
curr = self.root
while curr is not None:
if x < curr.data:
if curr.left is not None:
curr = curr.left
else:
curr.left = new_node
break
elif x > curr.data:
if curr.right is not None:
curr = curr.right
else:
curr.right = new_node
break
#def
def breadth(self):
if self.root is None:
return
queue = [self.root]
while queue:
node = queue.pop(0)
print(node.data,end=' ')
if node.left is not None:
queue.append(node.left)
if node.right is not None:
queue.append(node.right)
#def
def preorder(self, p):
if p is None:
return
print(p.data, end=" ")
self.preorder(p.left)
self.preorder(p.right)
#def
def postorder(self, p):
if p is None:
return
self.postorder(p.left)
self.postorder(p.right)
print(p.data,end=" ")
#def
def inorder(self, p):
if p is None:
return
self.inorder(p.left)
print(p.data,end=" ")
self.inorder(p.right)
#def
def count(self):
return self._count_helper(self.root)
def _count_helper(self,node):
if node is None:
return 0
return 1 + self._count_helper(node.left) + self._count_helper(node.right)
#def
def dele(self, x):
if not self.searchX(x):
return # Node does not exist
self.root = self._delete_helper(self.root, x)
def _delete_helper(self, node, x):
if node is None:
return None
if x < node.data:
node.left = self._delete_helper(node.left, x)
elif x > node.data:
node.right = self._delete_helper(node.right, x)
else:
if node.left is None:
return node.right
elif node.right is None:
return node.left
else:
min_right = self._find_min(node.right)
node.data = min_right.data
node.right = self._delete_helper(node.right, min_right.data)
return node
#def
def find_min(self):
if self.root is None:
return None
return self._find_min(self.root)
def _find_min(self,node):
while node.left is not None:
node = node.left
return node
#def
def find_max(self):
if self.root is None:
return None
return self._find_max(self.root)
def _find_max(self, node):
while node.right is not None:
node = node.right
return node
#def
def sum(self):
return self._sum_helper(self.root)
def _sum_helper(self,node):
if node is None:
return 0
return node.data + self._sum_helper(node.left) + self._sum_helper(node.right)
#def
def avg(self):
node_count = self.count()
if node_count == 0:
return 0
tree_sum = self.sum()
return tree_sum / node_count
#def
def height(self):
return self._height_helper(self.root)
def _height_helper(self,node):
if node is None:
return -1
left_height = self._height_helper(node.left)
right_height = self._height_helper(node.right)
return max(left_height,right_height) + 1
#def
def cost_of_most_expensive_path(self):
return self.cost_helper(self.root)
def cost_helper(self,node):
if node is None:
return 0
left_cost = node.data + self.cost_helper(node.left)
right_cost = node.data + self.cost_helper(node.right)
return max(left_cost,right_cost)
#def
def is_AVL(self):
return self._is_AVL_helper(self.root)
def _is_AVL_helper(self, node):
if node is None:
return True
left_height = self._height_helper(node.left)
right_height = self._height_helper(node.right)
if abs(left_height - right_height) > 1:
return False
return self._is_AVL_helper(node.left) and self._is_AVL_helper(node.right)
#def
def is_heap(self):
return self._is_heap_helper(self.root)
def _is_heap_helper(self,node):
if node is None:
return True
if node.left is not None and node.left.data > node.data:
return False
if node.right is not None and node.right.data > node.data:
return False
return self._is_heap_helper(node.left) and self._is_heap_helper(node.right)
# Testing the Binary Search Tree implementation
tree = BST()
# Test isEmpty()
print("Is tree empty?", tree.isEmpty())
# Test insert()
tree.insertX(5)
tree.insertX(3)
tree.insertX(7)
tree.insertX(1)
tree.insertX(4)
tree.insertX(6)
tree.insertX(8)
# Test breadth()
print("Breadth traversal:")
tree.breadth()
# Test search()
print("\nSearch 4:", tree.searchX(4)) # Output: <__main__.TreeNode object at 0x...>
print("Search 9:", tree.searchX(9)) # Output: None
# Test preorder()
print("\nPreorder traversal:")
tree.preorder(tree.root) # Output: 5 3 1 4 7 6 8
# Test postorder()
print("\npostorder traversal:")
tree.postorder(tree.root) # Output: 1 4 3 6 8 7 5
# Test inorder()
print("\ninorder traversal:")
tree.inorder(tree.root) # Output: 1 3 4 5 6 7 8
# Test count()
print("\nCount number of nodes:", tree.count()) # Output: 7
# Delete a node
tree.dele(3)
# Test breadth() after deletion
print("Breadth traversal after deletion:")
tree.breadth() # Output: 5 4 7 1 6 8
# Find the maximum value in the tree
max_node = tree.find_max()
if max_node is not None:
print("\nMaximum value:", max_node.data)
else:
print("Tree is empty.")
# Find the minimum value in the tree
min_node = tree.find_min()
if min_node is not None:
print("minimum value:", min_node.data)
else:
print("Tree is empty.")
#Caculate sum of the tree
print("Sum of all values in the tree:", tree.sum())
#Caculate average of the tree
print("Average of all values in the tree:", tree.avg())
# Calculate the height of the tree
tree_height = tree.height()
print("Height of the tree:", tree_height)
# Calculate the cost of the most expensive path
cost = tree.cost_of_most_expensive_path()
print("Cost of the most expensive path:", cost)
# Check if the tree is AVL
is_avl = tree.is_AVL()
print(is_avl) # Output: True
# Check whether the tree is a max-heap
print(tree.is_heap())  # Output: False (a BST keeps larger keys in right subtrees)
|
phamduclong2103/CSD203
|
LAB/LAB4.py
|
LAB4.py
|
py
| 7,991 |
python
|
en
|
code
| 0 |
github-code
|
6
|
34215956791
|
#state rates
statetaxrate = 0.1120
#anne arundel county rates
# New proposed rate for FY24 supersedes this - annearundeltaxrate = .93300
annearundeltaxrate = .98000
annearundelsolidwaste = 341
annearundelstormwater = 35.70
annapoliscountyrate = 0.559000
annapolisrate = 0.738000
|
dataguy2020/PropertyTax
|
rates.py
|
rates.py
|
py
| 275 |
python
|
en
|
code
| 1 |
github-code
|
6
|
41711840918
|
import numpy as np
from sklearn.model_selection import KFold, TimeSeriesSplit
from helpers.settings import *
from model.preprocessing_helper import *
from model.config import HISTORY_SIZE
from datetime import datetime
LSTM_STEP = 1
LSTM_FUTURE_TARGET = 1
LSTM_HISTORY = HISTORY_SIZE
TRAIN_DATASET_FRAC = 0.8
def generate_lstm_data(path, cols=INPUT_FILE_COLUMNS, target_column=TARGET_COLUMN, norm_cols=NORM_COLS,
history_size=LSTM_HISTORY, target_size=LSTM_FUTURE_TARGET,
step=LSTM_STEP, index_col=DATETIME_COLUMN,
filter_cols=None,
cat_cols=CATEGORIES, adjust_cols=ADJUST_COLUMNS,
scale_cols=SCALE_COLS, extra_columns=EXTRA_COLS):
"""
:param path: string - path to file
:param cols: List[string] list of all columns to be extracted from csv file
:param target_column: string - name of the target column
:param norm_cols: Dict[Dict[mu: float, std: float]] - list of columns to normalize
:param history_size: int - how many previous records should we use for LSTM dataset
:param target_size: int - how many outputs do we have (usually 1)
:param step: int - if multioutput then >1 else 1
:param index_col: string - name of the timeseries column
:param filter_cols: Dict[List[any]] - filters colums from Dict keys by list of values from the List
:param cat_cols: Dict[List[string]] - definition of all categorical data
:param adjust_cols: Dict[Dict[amount: float]] - amount added to each col value
:param scale_cols: Dict[Dict[min: float, max: float]] - list of columns to scale <0,1>
:param extra_columns: List[string] - list of columns to copy without changing
:return: Tuple(np.array, np.array)
"""
dataset = pd.read_csv(path, usecols=cols)
if target_column not in dataset.columns:
dataset[target_column] = pd.Series(np.zeros(len(dataset[index_col])), index=dataset.index)
dataset.index = dataset[index_col]
# test_y = dataset[dataset[target_column] > 0]
# print(test_y.describe())
# print(dataset.describe())
if filter_cols is not None:
for key, value in filter_cols.items():
dataset = dataset[dataset[key].isin(value)]
dataset['day_of_year'] = dataset[index_col].apply(lambda x: datetime.fromtimestamp(x).timetuple().tm_yday / 365)
cols_to_extract = ['day_of_year'] + list(adjust_cols.keys()) + list(
scale_cols.keys()) + list(norm_cols.keys()) + extra_columns + [target_column]
# print(cols_to_extract)
# print(dataset.columns)
# print(dataset[target_column].describe(), filter_cols)
dataset = preproc_data(
dataset[cols_to_extract],
norm_cols=norm_cols,
scale_cols=scale_cols,
adjust_cols=adjust_cols
)
# parse dataset to its values only, we don't need pandas for future processing from this point
dataset = dataset.values
# print(dataset[:5])
proposed_x, proposed_y = generate_multivariate_data(dataset, target_index=-1, history_size=history_size,
target_size=target_size, step=step)
# print(np.sum(proposed_y))
return proposed_x, proposed_y
def generate_multivariate_data(dataset, history_size=LSTM_HISTORY, target_size=LSTM_FUTURE_TARGET,
step=LSTM_STEP, target_index=-1, target=None):
"""
:param dataset: np.array
:param history_size: int - how many previous records should we use for LSTM dataset
:param target_size: int - how many outputs do we have (usually 1)
:param step: int - if multioutput then >1 else 1
:param target_index: int - index of the target column
:param target: np.array - should be set if dataset doesn't contain target
:return: Tuple(np.array, np.array)
"""
# if there is no explicit target when get target from dataset
if target is None:
target = dataset[:, target_index]
dataset = dataset[:, :target_index]
dataset_size = len(dataset)
train_to_idx = dataset_size - target_size
start_train_idx = history_size
data = []
labels = []
for i in range(start_train_idx, train_to_idx):
indices = range(i - history_size, i, step)
data.append(dataset[indices])
labels.append(target[i + target_size])
return np.array(data), np.array(labels)
def k_fold_data(x, y, folds=10):
x_train = []
y_train = []
x_test = []
y_test = []
kfold = KFold(n_splits=folds, shuffle=True)
    # Note: only the train/test split from the last fold is kept and returned.
    for train_index, test_index in kfold.split(x, y):
x_train = x[train_index]
y_train = y[train_index]
x_test = x[test_index]
y_test = y[test_index]
return x_train, y_train, x_test, y_test
def k_fold_ts_data(x, y, folds=10):
x_train = []
y_train = []
x_test = []
y_test = []
kfold = TimeSeriesSplit(n_splits=folds)
    # Note: only the train/test split from the last fold is kept and returned.
    for train_index, test_index in kfold.split(x, y):
x_train = x[train_index]
y_train = y[train_index]
x_test = x[test_index]
y_test = y[test_index]
return x_train, y_train, x_test, y_test
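# Shape sanity check (added): 100 rows, 5 columns, target in the last column,
# and a history of 20 steps give 79 windows of 20 timesteps x 4 features.
if __name__ == "__main__":
    demo = np.random.randn(100, 5)
    demo_x, demo_y = generate_multivariate_data(demo, history_size=20, target_size=1, step=1)
    assert demo_x.shape == (79, 20, 4)
    assert demo_y.shape == (79,)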
|
burnpiro/wod-usage-predictor
|
model/data_preprocessor.py
|
data_preprocessor.py
|
py
| 5,157 |
python
|
en
|
code
| 0 |
github-code
|
6
|
42647237351
|
from django.contrib.auth import get_user_model
from django.forms import widgets
from root import forms_override as forms
from root.models import UserGroup, EmailTemplate, EmailGroup, Experiment
INITIAL_EMAIL_TEMPLATE = """<html>
<body>
<h1>{{content}}</h1>
</body>
</html>
"""
class RegistrationForm(forms.Form):
first_name = forms.CharField()
last_name = forms.CharField()
email = forms.EmailField()
username = forms.CharField()
password = forms.CharField(
min_length=5,
widget=widgets.PasswordInput(attrs={'class': 'form-control'})
)
group = forms.CharField()
def create_user(self):
data = self.cleaned_data
return get_user_model().objects.create_user(
data['username'],
email=data['email'],
password=data['password'],
first_name=data['first_name'],
last_name=data['last_name']
)
def create_group(self, user):
group = UserGroup.objects.create(
name=self.cleaned_data['group']
)
group.users.add(user)
return group
def save(self):
user = self.create_user()
group = self.create_group(user)
return user, group
class EmailGroupSetUpForm(forms.Form):
user_group = forms.ChoiceField()
group_name = forms.CharField()
name = forms.CharField()
content = forms.CharField(
initial=INITIAL_EMAIL_TEMPLATE,
widget=widgets.Textarea(attrs={'class': 'form-control'})
)
content_type = forms.ChoiceField(
choices=(
('text/html; charset=UTF-8', 'text/html; charset=UTF-8'),
('text/plain', 'text/plain')
)
)
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['user_group'] = forms.ChoiceField(
choices=UserGroup.objects.filter(users=user).values_list('id', 'name')
)
def create_email_group(self):
return EmailGroup.objects.create(
name=self.cleaned_data['group_name'],
group_id=self.cleaned_data['user_group'],
)
def create_template(self, group):
return EmailTemplate.objects.create(
group=group,
name=self.cleaned_data['name'],
content=self.cleaned_data['content'],
content_type=self.cleaned_data['content_type']
)
def save(self):
group = self.create_email_group()
template = self.create_template(group)
return group, template
class ExperimentModelForm(forms.ModelForm):
name = forms.CharField()
chance = forms.IntegerField(min_value=0, max_value=100, initial=50)
start_time = forms.DateTimeField(required=False)
end_time = forms.DateTimeField(required=False)
is_active = forms.BooleanField(required=False)
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
query = EmailTemplate.objects.filter(group__group__users__in=[user])
self.fields['choice_a'] = forms.ModelChoiceField(
queryset=query
)
self.fields['choice_b'] = forms.ModelChoiceField(
queryset=query
)
self.fields['email_group'] = forms.ModelChoiceField(
queryset=EmailGroup.objects.filter(group__users__in=[user]),
widget=forms.HiddenInput()
)
class Meta:
model = Experiment
fields = [
'name', 'chance',
'choice_a', 'choice_b',
'start_time', 'end_time',
'is_active', 'email_group'
]
class EmailTemplateModelForm(forms.ModelForm):
name = forms.CharField()
subject = forms.CharField(initial='Subjects are also a template: {{content}}')
content = forms.CharField(
initial=INITIAL_EMAIL_TEMPLATE,
widget=widgets.Textarea(attrs={'class': 'form-control'})
)
content_type = forms.ChoiceField(
choices=(
('text/html; charset=UTF-8', 'text/html; charset=UTF-8'),
('text/plain', 'text/plain')
)
)
preview_data = forms.CharField(
widget=widgets.Textarea(attrs={'class': 'form-control'}),
initial='{"content": "hello-world"}'
)
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['group'] = forms.ModelChoiceField(
queryset=EmailGroup.objects.filter(group__users__in=[user]),
widget=forms.HiddenInput()
)
class Meta:
model = EmailTemplate
fields = [
'name', 'group',
'content', 'content_type',
'preview_data'
]
|
ograycode/engage
|
root/forms.py
|
forms.py
|
py
| 4,691 |
python
|
en
|
code
| 0 |
github-code
|
6
|
39259219902
|
#!/usr/bin/env python3
import rclpy
from rclpy.node import Node
import speech_recognition as sr
from custom_if.srv import SendSentence
from functools import partial
from nl_understanding.tcp_client import GenericClient
### Node class
class NLUnderstanding(Node):
def __init__(self):
super().__init__("nlu_node")
self.get_logger().info("NLU node is up.")
self.client = GenericClient(host='localhost', port=50000)
# Service
self.server = self.create_service(SendSentence, 'send_command', self.callback_command)
# Server callbacks
def callback_command(self, request, response):
self.client.send_request(1)
self.get_logger().info("Request sent.")
response.done = True
return response
def main(args=None):
rclpy.init(args=args)
node = NLUnderstanding()
rclpy.spin(node)
rclpy.shutdown()
if __name__ == "__main__":
main()
|
Alessandro-Scarciglia/VoiceAssistant
|
nl_understanding/nl_understanding/nl_understanding.py
|
nl_understanding.py
|
py
| 854 |
python
|
en
|
code
| 0 |
github-code
|
6
|
34773865480
|
import time
from functools import cache
from math import radians
from XivCombat.utils import a, s, cnt_enemy, res_lv, find_area_belongs_to_me
from XivCombat.strategies import *
from XivCombat import define, api
from XivCombat.multi_enemy_selector import Rectangle, NearCircle, circle, FarCircle
aoe = NearCircle(5.)
fire_aoe = FarCircle(20, 5)
TEN = 3 # Ten (heaven)
CHI = 2 # Chi (earth)
JIN = 1 # Jin (man)
def get_mudra(effects: dict):
    if s('结印') not in effects:
        return ""
    p = effects[s('结印')].param
    # Don't reuse the name "s" here: assigning to it would shadow the imported
    # helper and raise UnboundLocalError on the lookups above.
    mudra_str = ''
    for i in range(4):
        m = (p >> (i * 2)) & 0b11
        if m:
            mudra_str += str(m)
        else:
            break
    return mudra_str
m2s = {
TEN: 2259,
CHI: 2261,
JIN: 2263,
}
def c(*mudras: int):
return [m2s[m] for m in mudras]
combos = {
'normal': c(TEN),
'fire': c(CHI, TEN),
'thunder': c(TEN, CHI),
'ice': c(TEN, JIN),
'wind': c(JIN, CHI, TEN),
'ground': c(JIN, TEN, CHI),
'water': c(TEN, CHI, JIN),
'water_multi': c(CHI, TEN, JIN),
}
class NinjaStrategy(Strategy):
name = 'ot/nin'
job = 'Ninja'
def __init__(self):
self.effects_temp = dict()
self.combo = []
def have_effect(self, data: 'LogicData', effect_id: int, allow_time=2):
return effect_id in data.effects or self.effects_temp.setdefault(effect_id, 0) > time.time() - allow_time
def set_effect(self, effect_id: int):
self.effects_temp[effect_id] = time.time()
def can_ground(self, data: 'LogicData'):
return not self.have_effect(data, s('土遁之术'), 5)
def get_ground(self):
self.set_effect(s('土遁之术'))
return combos['ground'].copy()
    def common(self, data: 'LogicData') -> UseAbility | UseItem | UseCommon | None:
if data.gcd < 0.3 and not self.combo:
combo_use = data.config.custom_settings.setdefault('ninja_combo', '')
if combo_use in combos:
if combo_use == "ground": self.set_effect(s('土遁之术'))
data.config.custom_settings['ninja_combo'] = ''
self.combo = combos[combo_use].copy()
if self.combo:
return UseAbility(self.combo.pop(0))
elif s('结印') in data.effects:
return UseAbility(a('忍术(NIN)'))
def global_cool_down_ability(self, data: 'LogicData') -> UseAbility | UseItem | UseCommon | None:
must_huton = data.gauge.huton_ms < 10*1000 and data.gauge.huton_ms and data.max_ttk > data.gauge.huton_ms/1000
if data.target_distance <= 3:
single_target = data.target
else:
single_target = data.get_target(define.DISTANCE_NEAREST, data.enemy_can_attack_by(a('飞刀(NIN)')))
if not single_target:
return
if data.actor_distance_effective(single_target) > 5:
if not data.gcd and data.me.level >= 15:
return UseAbility(a('飞刀(NIN)'), single_target.id)
            # the original 'else: None' was a no-op; fall through deliberately,
            # since the ninjutsu selection below still works at range
if data.me.level >= 35:
fire_aoe_target, fire_aoe_cnt = cnt_enemy(data, fire_aoe)
else:
fire_aoe_target, fire_aoe_cnt = data.target, 0
if data.me.level >= 38:
aoe_target, aoe_cnt = cnt_enemy(data, aoe)
else:
aoe_target, aoe_cnt = data.me, 0
if s('生杀予夺') in data.effects:
if data.me.level >= 76:
if aoe_cnt > 2:
self.combo = combos['fire'].copy()
else:
self.combo = combos['ice'].copy()
elif aoe_cnt > 2:
                if data.max_ttk > 15 and self.can_ground(data):
self.combo = self.get_ground()
else:
self.combo = combos['fire'].copy()
else:
self.combo = combos['thunder'].copy()
elif s('天地人') in data.effects:
if aoe_cnt < 3:
self.combo = combos['water'].copy()
else:
if self.can_ground(data):
self.combo = self.get_ground()
else:
self.combo = combos['water_multi'].copy()
elif data[a('天之印(NIN)(0)')] <= 20 and data.skill_unlocked(a('天之印(NIN)(0)')):
if data.me.level >= 45 and data.skill_unlocked(a('人之印(NIN)(0)')):
if not data.gauge.huton_ms:
self.combo = combos['wind'].copy()
else:
if (data[a('天之印(NIN)(0)')] < 20 or not data[a('命水(NIN)')]) and s('水遁之术') not in data.effects and (data[a('天之印(NIN)')] < 5 or data[a('攻其不备(NIN)')] < 5 or data.target_distance > 6):
self.combo = combos['water'].copy()
elif aoe_cnt > 2 and data.max_ttk > 15 and self.can_ground(data):
self.combo = self.get_ground()
if not self.combo and (data[a('天之印(NIN)(0)')] < 5 or data.target_distance > 6):
if data.me.level >= 35:
if fire_aoe_cnt > 2:
self.combo = combos['fire'].copy()
else:
self.combo = combos['thunder'].copy()
else:
self.combo = combos['normal'].copy()
if self.combo:
return UseAbility(self.combo.pop(0))
if data.combo_id == a('血雨飞花(NIN)') and data.skill_unlocked(a('八卦无刃杀(NIN)')) and aoe_cnt:
return UseAbility(a('八卦无刃杀(NIN)'), data.me.id)
if aoe_cnt > 2:
return UseAbility(a('血雨飞花(NIN)'), data.me.id)
if data.actor_distance_effective(single_target) > 3:
return
if data.me.level >= 54 and data.combo_id == a('绝风(NIN)') and data.skill_unlocked(a('强甲破点突(NIN)')):
            if must_huton:
                return UseAbility(a('强甲破点突(NIN)'), single_target.id)
if data.me.level >= 26 and data.combo_id == a('绝风(NIN)'):
return UseAbility(a('旋风刃(NIN)'), single_target.id)
if data.me.level >= 4 and data.combo_id == a('双刃旋(NIN)'):
return UseAbility(a('绝风(NIN)'), single_target.id)
return UseAbility(a('双刃旋(NIN)'), single_target.id)
# def non_global_cool_down_ability(self, data: 'LogicData') -> UseAbility | UseItem | UseCommon | None:
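
The combo dictionary above rides on a 2-bit-per-mudra packing inside the status effect's param, decoded by get_mudra(). A standalone sketch of the same decoding, with a made-up param value:

def decode_mudra(param: int) -> str:
    # Each mudra occupies two bits, lowest pair first; a zero field terminates.
    seq = ''
    for i in range(4):
        m = (param >> (i * 2)) & 0b11
        if not m:
            break
        seq += str(m)
    return seq

assert decode_mudra(0b1011) == '32'  # TEN (3) pressed first, then CHI (2)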
|
ShoOtaku/f3combat
|
OT/nin.py
|
nin.py
|
py
| 6,428 |
python
|
en
|
code
| 1 |
github-code
|
6
|
15855579751
|
from appium import webdriver
from appium.webdriver.common.mobileby import MobileBy
from selenium.webdriver.common.by import By
class TestWeworkTest:
def setup(self):
desired_cap = {}
desired_cap["platformName"] = "Android"
# desired_cap["platformVersion"] = "6.0"
desired_cap["deviceName"] = "127.0.0.1:62001"
desired_cap["appPackage"] = "com.tencent.wework"
desired_cap["appActivity"] = ".launch.WwMainActivity"
desired_cap["noReset"] = True
desired_cap["skipServerInstallation"] = True # 跳过uiautomarot2安装
desired_cap["skipDeviceInitialization"] = True # 跳过设备初始化
# desired_cap["unicodeKeyBoard"] = 'true'
# desired_cap["restKeyBoard"] = 'true'
desired_cap["settings[waitForIdleTimeout]"] = 0 # 等待页面完全加载完成的时间
desired_cap["dontStopAppOnReset"] = True # 等待页面完全加载完成的时间
self.drive = webdriver.Remote('http://127.0.0.1:4723/wd/hub', desired_cap)
self.drive.implicitly_wait(5)
def teardown(self):
self.drive.quit()
    def swipe_find(self, by_method, ele, num=3):
        # Swipe down the page up to `num` times looking for the element;
        # returns the element if found, otherwise returns None implicitly.
for i in range(num):
try:
rst_ele = self.drive.find_element(by_method, ele)
return rst_ele
            except Exception:
windows_size = self.drive.get_window_size()
width = windows_size.get('width')
height = windows_size.get('height')
start_width = width / 2
start_height = height * 0.8
end_width = width / 2
end_height = height * 0.3
self.drive.swipe(start_width, start_height, end_width, end_height)
    def test_add_member(self):  # despite the name, this exercises the clock-in flow
self.drive.find_element(MobileBy.XPATH, "//*[@text='工作台']").click()
        # Swipe to find the clock-in ("打卡") entry
# self.drive.find_element_by_android_uiautomator('new UiScrollable(new UiSelector().'
# 'scrollable(true).instance(0)).'
# f'scrollIntoView(new UiSelector().text("打卡")'
# ');').click()
# self.drive.find_element(MobileBy.XPATH, "//*[@text='打卡']").click()
self.swipe_find(MobileBy.XPATH, "//*[@text='打卡']").click()
        # self.drive.update_settings({"waitForIdleTimeout": 0})  # update the idle-wait timeout via the driver instead
self.drive.find_element(MobileBy.XPATH, "//*[@text='外出打卡']").click()
self.drive.find_element(MobileBy.XPATH, "//*[contains(@text,'次外出')]").click()
self.drive.find_element(MobileBy.XPATH, "//*[@text='外出打卡成功']")
# while True:
# current_xml = self.drive.page_source
# if "添加成功" in current_xml:
# print(current_xml)
# break
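
The commented-out page_source polling above can be replaced with an explicit wait; a sketch using selenium's WebDriverWait (which ships alongside the Appium Python client), with a hypothetical helper name:

from selenium.webdriver.support.ui import WebDriverWait

def wait_for_text(driver, text, timeout=10):
    # Polls until an element with the exact @text appears; raises TimeoutException otherwise.
    return WebDriverWait(driver, timeout).until(
        lambda d: d.find_element(MobileBy.XPATH, f"//*[@text='{text}']"))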
|
sunmings1310/HogwartsHomework
|
hogwarts-homework/AppWeworkHomework/test_clokc_in.py
|
test_clokc_in.py
|
py
| 3,015 |
python
|
en
|
code
| 0 |
github-code
|
6
|
74977703867
|
import re
import logging
import sys
import os
import yaml
from rdflib import ConjunctiveGraph, Literal, URIRef, BNode, Namespace
from dipper.graph.Graph import Graph as DipperGraph
from dipper.utils.CurieUtil import CurieUtil
from dipper import curie_map as curie_map_class
from dipper.models.BiolinkVocabulary import BioLinkVocabulary as blv
LOG = logging.getLogger(__name__)
class RDFGraph(DipperGraph, ConjunctiveGraph):
"""
Extends RDFLibs ConjunctiveGraph
The goal of this class is wrap the creation
of triples and manage creation of URIRef,
Bnodes, and literals from an input curie
"""
curie_map = curie_map_class.get()
curie_util = CurieUtil(curie_map)
# make global translation table available outside the ingest
with open(
os.path.join(
os.path.dirname(__file__),
'../../translationtable/GLOBAL_TERMS.yaml')) as fhandle:
globaltt = yaml.safe_load(fhandle)
globaltcid = {v: k for k, v in globaltt.items()}
def __init__(self, are_bnodes_skized=True, identifier=None):
# print("in RDFGraph with id: ", identifier)
super().__init__('IOMemory', identifier)
self.are_bnodes_skized = are_bnodes_skized
self.prefixes = set()
# Can be removed when this is resolved
# https://github.com/RDFLib/rdflib/issues/632
# 2020 oct. possibly fixed
# for pfx in ('OBO',): # , 'ORPHA'):
# self.bind(pfx, Namespace(self.curie_map[pfx]))
def _make_category_triple(
self, subject, category, predicate=blv.terms['category']
):
"""
add a triple to capture subject or object category (in CURIE form) that was
passed to addTriple()
"""
try:
self.add((
self._getnode(subject),
self._getnode(predicate),
self._getnode(category)))
        except Exception:
            LOG.warning(
                "Problem adding triple in _make_category_triple for "
                "subj: %s pred: %s obj(category): %s",
                subject, predicate, category)
def _is_literal(self, thing):
"""
make inference on type (literal or CURIE)
return: logical
"""
if self.curie_regexp.match(thing) is not None or\
thing.split(':')[0].lower() in ('http', 'https', 'ftp'):
object_is_literal = False
else:
object_is_literal = True
return object_is_literal
def addTriple(
self,
subject_id,
predicate_id,
obj,
object_is_literal=None,
literal_type=None,
subject_category=None,
object_category=None
):
if object_is_literal is None:
object_is_literal = self._is_literal(obj)
# add triples for subject category info
if subject_category is not None:
self._make_category_triple(subject_id, subject_category)
# add triples for obj category info, if obj is not a literal
if not object_is_literal:
if object_category is not None:
self._make_category_triple(obj, object_category)
else: # emit warning if object category is given for a literal
if object_category is not None:
LOG.warning("I was given a category %s for obj: %s, " +
"which seems to be a literal!",
object_category, obj)
        if object_is_literal is True:
            if isinstance(obj, str):
                obj = re.sub(r'[\t\n\r\f\v]+', ' ', obj)  # collapse any ws run to a space
if literal_type is not None and obj is not None and obj not in ("", " "):
literal_type_iri = self._getnode(literal_type)
self.add(
(self._getnode(subject_id), self._getnode(predicate_id),
Literal(obj, datatype=literal_type_iri)))
elif obj is not None:
# could attempt to infer a type here but there is no use case
self.add((
self._getnode(subject_id), self._getnode(predicate_id),
Literal(obj)))
else:
LOG.warning(
"None as literal object for subj: %s and pred: %s",
subject_id, predicate_id)
                # get a sense of where the None is coming from
                # magic number here is "steps up the call stack"
                # TODO there may be easier/idiomatic ways to do this now
for call in range(2, 0, -1):
LOG.warning(
'\t%sfrom: %s', '\t' * call, sys._getframe(call).f_code.co_name)
elif obj is not None and obj != '': # object is a resource
self.add((
self._getnode(subject_id),
self._getnode(predicate_id),
self._getnode(obj)))
else:
LOG.warning(
"None/empty object IRI for subj: %s and pred: %s",
subject_id, predicate_id)
def skolemizeBlankNode(self, curie):
stripped_id = re.sub(r'^_:|^_', '', curie, 1)
return URIRef(self.curie_map['BNODE'] + stripped_id)
def _getnode(self, curie):
"""
This is a wrapper for creating a URIRef or Bnode object
with a given a curie or iri as a string.
If an id starts with an underscore, it assigns it to a BNode, otherwise
it creates it with a standard URIRef.
Alternatively, self.skolemize_blank_node is True,
it will skolemize the blank node
:param curie: str identifier formatted as curie or iri
:return: node: RDFLib URIRef or BNode object
"""
node = None
if curie[0] == '_':
if self.are_bnodes_skized:
node = self.skolemizeBlankNode(curie)
else: # delete the leading underscore to make it cleaner
node = BNode(re.sub(r'^_:|^_', '', curie, 1))
# Check if curie string is actually an IRI
elif curie[:4] == 'http' or curie[:3] == 'ftp' or curie[:4] == 'jdbc':
node = URIRef(curie)
else:
iri = RDFGraph.curie_util.get_uri(curie)
if iri is not None:
node = URIRef(iri)
# Bind prefix map to graph
prefix = curie.split(':')[0]
self.prefixes.add(prefix)
else:
LOG.error("couldn't make URI for %s", curie)
                # get a sense of where the CURIE-ish thing is coming from
# magic number here is "steps up the call stack"
for call in range(3, 0, -1):
LOG.warning(
'\t%sfrom: %s', '\t' * call, sys._getframe(call).f_code.co_name)
return node
def bind_all_namespaces(self):
"""
Results in the RDF @prefix directives for every ingest
being added to this ingest.
"""
for prefix in self.curie_map.keys():
iri = self.curie_map[prefix]
self.bind(prefix, Namespace(iri))
# serialize() conflicts between rdflib & Graph.serialize abstractmethod
# GraphUtils expects the former. (too bad there is no multiple dispatch)
# rdflib version
def serialize(
self, destination=None, format='turtle', base=None, encoding=None
):
for prefix in self.prefixes:
mapped_iri = self.curie_map[prefix]
self.bind(prefix, Namespace(mapped_iri))
return ConjunctiveGraph.serialize(self, destination, format)
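
A minimal usage sketch for the class above; the CURIEs are illustrative and assume 'MONDO' and 'rdfs' resolve through dipper's curie map (this code targets the rdflib 4/5 era, where serialize() returns bytes):

graph = RDFGraph()
graph.addTriple('MONDO:0005015', 'rdfs:label', 'diabetes mellitus',
                object_is_literal=True)
print(graph.serialize(format='turtle').decode())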
|
monarch-initiative/dipper
|
dipper/graph/RDFGraph.py
|
RDFGraph.py
|
py
| 7,720 |
python
|
en
|
code
| 53 |
github-code
|
6
|
2535030622
|
from pathlib import Path
from shutil import move
from threading import Thread
import logging
folders = []
extensions = []
def grabs_folder(path: Path):
for el in path.iterdir():
if el.is_dir():
folders.append(el)
grabs_folder(el)
def sort_file(path: Path):
for el in path.iterdir():
if el.is_file():
ext = el.suffix
            new_path = base_folder / ext  # base_folder is the global set in __main__
try:
new_path.mkdir(exist_ok=True, parents=True)
move(el, new_path / el.name)
except OSError as e:
logging.error(e)
def del_empty_folders(path: Path):
    # Removes only the immediate empty subfolders of path (not recursive).
    for el in path.iterdir():
if el.is_dir():
try:
el.rmdir()
except OSError:
pass
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format="%(threadName)s %(message)s")
base_folder = Path(input('Type path to folder:'))
folders.append(base_folder)
grabs_folder(base_folder)
threads = []
for folder in folders:
th = Thread(target=sort_file, args=(folder,))
th.start()
threads.append(th)
[th.join() for th in threads]
del_empty_folders(base_folder)
    print('The process has finished successfully')
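
del_empty_folders above clears only one level of empty directories; a bottom-up sketch (not the author's code) that also removes nested empty folders:

import os

def del_empty_folders_deep(path: Path):
    # Walk bottom-up so children are removed before their parents.
    for root, dirs, _files in os.walk(path, topdown=False):
        for d in dirs:
            try:
                (Path(root) / d).rmdir()  # rmdir raises OSError if the dir is not empty
            except OSError:
                pass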
|
PetroChulkov/web_homework3
|
file_sorter.py
|
file_sorter.py
|
py
| 1,354 |
python
|
en
|
code
| 1 |
github-code
|
6
|